[
  {
    "path": ".github/workflows/ci.yml",
    "content": "name: CI\n\non:\n  push:\n    branches: [\"master\"]\n  pull_request:\n    branches: [\"master\"]\n\npermissions:\n  contents: read\n\njobs:\n  ci:\n    runs-on: ubuntu-latest\n    strategy:\n      fail-fast: false\n    steps:\n      - name: Checkout current branch\n        uses: actions/checkout@v6.0.2\n      - name: Setup Java\n        uses: actions/setup-java@v5.2.0\n        with:\n          distribution: temurin\n          java-version: 25\n          check-latest: true\n      - name: Setup sbt\n        uses: sbt/setup-sbt@v1\n      - name: Cache scala dependencies\n        uses: coursier/cache-action@v8\n      - name: Run tests\n        run: sbt 'scalafmtCheckAll; test'\n"
  },
  {
    "path": ".gitignore",
    "content": ".settings\n.DS_Store\n\n*.iml\n.idea\ntarget\nnbproject\nnb-configuration.xml\n\n*.class\n*.log\n*.jar\n*.war\n*.ear\n*.zip\n*.tar.gz\n*.rar\n\nhs_err_pid*\n\n*.log\n\n.bsp\n\n*.sc\n*.db\n\n*metals*\n.bloop\n.vscode"
  },
  {
    "path": ".scalafmt.conf",
    "content": "version = 3.8.4-RC3\nrunner.dialect = scala3\nrewrite.scala3.insertEndMarkerMinLines = 20\nrewrite.scala3.removeEndMarkerMaxLines = 19\nbinPack.parentConstructors = Oneline\n"
  },
  {
    "path": "LICENSE",
    "content": "\n                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. (Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License."
  },
  {
    "path": "README.md",
    "content": "## Magnum\n\n[![Latest version](https://index.scala-lang.org/augustnagro/magnum/magnum/latest.svg?color=orange)](https://index.scala-lang.org/augustnagro/magnum/magnum)\n\nYet another database client for Scala. No dependencies, high productivity.\n\n* [Installing](#installing)\n* [ScalaDoc](#scaladoc)\n* [Documentation](#documentation)\n  * [`connect` creates a database connection](#connect-creates-a-database-connection)\n  * [`transact` creates a database transaction](#transact-creates-a-database-transaction)\n  * [Type-safe Transaction & Connection Management](#type-safe-transaction--connection-management)\n  * [Customizing Transactions](#customizing-transactions)\n  * [Sql Interpolator, Frag, Query, Update, Returning](#sql-interpolator-frag-query-and-update)\n  * [Batch Updates](#batch-updates)\n  * [Immutable Repositories](#immutable-repositories)\n  * [Repositories](#repositories)\n  * [Database generated columns](#database-generated-columns)\n  * [Specifications](#specifications)\n  * [Scala 3 Enum & NewType Support](#scala-3-enum--newtype-support)\n  * [`DbCodec`: Typeclass for JDBC reading & writing](#dbcodec-typeclass-for-jdbc-reading--writing)\n  * [Future-Proof Queries](#future-proof-queries)\n  * [Splicing Literal Values into Frags](#splicing-literal-values-into-frags)\n  * [Postgres Module](#postgres-module)\n  * [Logging](#logging-sql-queries)\n* [Integrations](#integrations)\n  * [ZIO](#zio) \n* [Motivation](#motivation)\n* [Feature List And Database Support](#feature-list)\n* [Talks and Blogs](#talks-and-blogs)\n* [Frequently Asked Questions](#frequently-asked-questions)\n\n## Installing\n\n```\n\"com.augustnagro\" %% \"magnum\" % \"1.3.0\"\n```\n\nMagnum requires Scala >= 3.3.0\n\nYou must also install the JDBC driver for your database, for example:\n\n```\n\"org.postgresql\" % \"postgresql\" % \"<version>\"\n```\n\nAnd for performance, a JDBC connection pool like [HikariCP](https://github.com/brettwooldridge/HikariCP)\n\n## 
ScalaDoc\n\nhttps://javadoc.io/doc/com.augustnagro/magnum_3\n\n## Documentation\n\n### `connect` creates a database connection.\n\n`connect` takes two parameters; the database Transactor,\nand a context function with a given `DbCon` connection.\nFor example:\n\n```scala\nimport com.augustnagro.magnum.*\n\nval dataSource: javax.sql.DataSource = ???\nval xa = Transactor(dataSource)\n\nval users: Vector[User] = connect(xa):\n  sql\"SELECT * FROM user\".query[User].run()\n```\n\n### `transact` creates a database transaction.\n\nLike `connect`, `transact` accepts a Transactor and context function.\nThe context function provides a `DbTx` instance.\nIf the function throws, the transaction will be rolled back.\n\n```scala\n// update is rolled back\ntransact(xa):\n  sql\"UPDATE user SET first_name = $firstName WHERE id = $id\".update.run()\n  thisMethodThrows()\n```\n\n### Type-safe Transaction & Connection Management\n\nAnnotate transactional methods with `using DbTx`, and connections with `using DbCon`.\n\nSince `DbTx <: DbCon`, it's impossible to call a method with the wrong context.\n\nFor example, this compiles:\n\n```scala\ndef runUpdateAndGetUsers()(using DbTx): Vector[User] =\n  userRepo.deleteById(1L)\n  getUsers\n\ndef getUsers(using DbCon): Vector[User] =\n  sql\"SELECT * FROM user\".query.run()\n```\n\nBut not this:\n\n```scala\ndef runSomeQueries(using DbCon): Vector[User] =\n  runUpdateAndGetUsers()\n```\n\n### Customizing transactions\n\n`Transactor` lets you customize the transaction (or connection) behavior.\n\n```scala\nval xa = Transactor(\n  dataSource = ???,\n  sqlLogger = SqlLogger.logSlowQueries(500.milliseconds),\n  connectionConfig = con =>\n    con.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ)\n)\n\ntransact(xa):\n  sql\"SELECT id from myUser\".query[Long].run()\n```\n\n### Sql Interpolator, Frag, Query, and Update\n\nThe `sql` interpolator can express any SQL expression, returning a `Frag` sql fragment. 
You can interpolate values without the risk of SQL-injection attacks.\n\n```scala\nval firstNameOpt = Some(\"John\")\nval twoDaysAgo = OffsetDateTime.now.minusDays(2)\n\nval frag: Frag =\n  sql\"\"\"\n    SELECT id, last_name FROM user\n    WHERE first_name = $firstNameOpt\n    AND created <= $twoDaysAgo\n    \"\"\"\n```\n\nFrags can be turned into queries with the `query[T](using DbCodec[T])` method:\n\n```scala\nval query = frag.query[(Long, String)] // Query[(Long, String)]\n```\n\nOr updates via `update`\n\n```scala\nval update: Update =\n  sql\"UPDATE user SET first_name = 'Buddha' WHERE id = 3\".update\n```\n\nOr an update with a `RETURNING` clause via `returning`:\n\n```scala\nval updateReturning: Returning =\n  sql\"\"\"\n     UPDATE user SET first_name = 'Buddha'\n     WHERE last_name = 'Harper'\n     RETURNING id\n     \"\"\".returning[Long]\n```\n\nAll are executed via `run()(using DbCon)`:\n\n```scala\ntransact(xa):\n  val tuples: Vector[(Long, String)] = query.run()\n  val updatedRows: Int = update.run()\n  val updatedIds: Vector[Long] = updateReturning.run()\n```\n\n### Batch Updates\n\nBatch updates are supported via `batchUpdate` method in package `com.augustnagro.magnum`.\n\n```scala\nconnect(xa):\n  val users: Iterable[User] = ???\n  val updateResult: BatchUpdateResult =\n    batchUpdate(users): user =>\n      sql\"...\".update\n```\n\n`batchUpdate` returns a `BatchUpdateResult` enum, which is `Success(numRowsUpdated)` or `SuccessNoInfo` otherwise.\n\n### Immutable Repositories\n\nThe `ImmutableRepo` class auto-generates the following methods at compile-time:\n\n```scala\n  def count(using DbCon): Long\n  def existsById(id: ID)(using DbCon): Boolean\n  def findAll(using DbCon): Vector[E]\n  def findAll(spec: Spec[E])(using DbCon): Vector[E]\n  def findById(id: ID)(using DbCon): Option[E]\n  def findAllById(ids: Iterable[ID])(using DbCon): Vector[E]\n```\n\nHere's an example:\n\n```scala\n@Table(PostgresDbType, SqlNameMapper.CamelToSnakeCase)\ncase 
class User(\n  @Id id: Long,\n  firstName: Option[String],\n  lastName: String,\n  created: OffsetDateTime\n) derives DbCodec\n\nval userRepo = ImmutableRepo[User, Long]\n\ntransact(xa):\n  val cnt = userRepo.count\n  val userOpt = userRepo.findById(2L)\n```\n\nImportantly, class User is annotated with `@Table`, which defines the table's database type. The annotation optionally specifies the name-mapping between Scala fields and column names. You can also use the `@SqlName` annotation on individual fields. Finally, the table must `derive DbCodec`, or otherwise provide an implicit DbCodec instance.\n\nThe optional `@Id` annotation denotes the table's primary key. Not setting `@Id` will default to using the first field. If there is no logical id, then remove the annotation and use Null in the ID type parameter of Repositories (see next).\n\nIt is a best practice to extend ImmutableRepo to encapsulate your SQL in repositories. This way, it's easier to maintain since they're grouped together.\n\n```scala\nclass UserRepo extends ImmutableRepo[User, Long]:\n  def firstNamesForLast(lastName: String)(using DbCon): Vector[String] =\n    sql\"\"\"\n      SELECT DISTINCT first_name\n      FROM user\n      WHERE last_name = $lastName\n      \"\"\".query[String].run()\n        \n  // other User-related queries here\n```\n\n### Repositories\n\nThe `Repo` class auto-generates the following methods at compile-time:\n\n```scala\n  def count(using DbCon): Long\n  def existsById(id: ID)(using DbCon): Boolean\n  def findAll(using DbCon): Vector[E]\n  def findAll(spec: Spec[E])(using DbCon): Vector[E]\n  def findById(id: ID)(using DbCon): Option[E]\n  def findAllById(ids: Iterable[ID])(using DbCon): Vector[E]\n  \n  def delete(entity: E)(using DbCon): Unit\n  def deleteById(id: ID)(using DbCon): Unit\n  def truncate()(using DbCon): Unit\n  def deleteAll(entities: Iterable[E])(using DbCon): BatchUpdateResult\n  def deleteAllById(ids: Iterable[ID])(using DbCon): BatchUpdateResult\n  def 
insert(entityCreator: EC)(using DbCon): Unit\n  def insertAll(entityCreators: Iterable[EC])(using DbCon): Unit\n  def insertReturning(entityCreator: EC)(using DbCon): E\n  def insertAllReturning(entityCreators: Iterable[EC])(using DbCon): Vector[E]\n  def update(entity: E)(using DbCon): Unit\n  def updateAll(entities: Iterable[E])(using DbCon): BatchUpdateResult\n```\n\nHere's an example:\n\n```scala\n@Table(PostgresDbType, SqlNameMapper.CamelToSnakeCase)\ncase class User(\n  @Id id: Long,\n  firstName: Option[String],\n  lastName: String,\n  created: OffsetDateTime\n) derives DbCodec\n\nval userRepo = Repo[User, User, Long]\n\nval countAfterUpdate = transact(xa):\n  userRepo.deleteById(2L)\n  userRepo.count\n```\n\nIt is a best practice to encapsulate your SQL in repositories.\n\n```scala\nclass UserRepo extends Repo[User, User, Long]\n```\n\nAlso note that Repo extends ImmutableRepo. Some databases cannot support every method, and will throw UnsupportedOperationException.\n\n### Database generated columns\n\nIt is often the case that database columns are auto-generated, for example, primary key IDs. This is why the Repo class has 3 type parameters. \n\nThe first defines the Entity-Creator, which should omit any fields that are auto-generated. The entity-creator class must be an 'effective' subclass of the entity class, but it does not have to subclass the entity. This is verified at compile time.\n\nThe second type parameter is the Entity class, and the final is for the ID. 
If the Entity does not have a logical ID, use Null.\n\n```scala\ncase class UserCreator(\n  firstName: Option[String],\n  lastName: String,\n) derives DbCodec\n\n@Table(PostgresDbType, SqlNameMapper.CamelToSnakeCase)\ncase class User(\n  @Id id: Long,\n  firstName: Option[String],\n  lastName: String,\n  created: OffsetDateTime\n) derives DbCodec\n\nval userRepo = Repo[UserCreator, User, Long]\n\nval newUser: User = transact(xa):\n  userRepo.insertReturning(\n    UserCreator(Some(\"Adam\"), \"Smith\")\n  )\n```\n\n### Specifications\n\nSpecifications help you write safe, dynamic queries.\nAn example use-case would be a search results page that allows users to sort and filter the paginated data.\n\n1. If you need to perform joins to get the data needed, first create a database view.\n2. Next, create an entity class that derives DbCodec.\n3. Finally, use the Spec class to create a specification.\n\nHere's an example:\n\n```scala\nval partialName = \"Ja%\"\nval lastNameOpt = Option(\"Brown\")\nval searchDate = OffsetDateTime.now.minusDays(2)\nval idPosition = 42L\n\nval spec = Spec[User]\n  .where(sql\"first_name ILIKE $partialName\")\n  .where(lastNameOpt.map(ln => sql\"last_name = $ln\").getOrElse(sql\"\"))\n  .where(sql\"created >= $searchDate\")\n  .seek(\"id\", SeekDir.Gt, idPosition, SortOrder.Asc)\n  .limit(10)\n\nval users: Vector[User] = userRepo.findAll(spec)\n```\n\nNote that both [seek pagination](https://blog.jooq.org/faster-sql-paging-with-jooq-using-the-seek-method/) and offset pagination are supported.\n\n### Scala 3 Enum & NewType Support\n\nMagnum supports Scala 3 enums (non-ADT) fully, by default writing & reading them as Strings. 
For example,\n\n```scala\n@Table(PostgresDbType, SqlNameMapper.CamelToUpperSnakeCase)\nenum Color derives DbCodec:\n  case Red, Green, Blue\n\n@Table(PostgresDbType, SqlNameMapper.CamelToSnakeCase)\ncase class User(\n  @Id id: Long,\n  firstName: Option[String],\n  lastName: String,\n  created: OffsetDateTime,\n  favoriteColor: Color\n) derives DbCodec\n```\n\nNewTypes and Opaque Type Alias can cause issues with derivation since given DbCodecs are not available. A simple way to provide them is using DbCodec.bimap:\n\n```scala\nopaque type MyId = Long\n\nobject MyId:\n  def apply(id: Long): MyId =\n    require(id >= 0)\n    id\n\n  extension (myId: MyId)\n    def underlying: Long = myId\n\n  given DbCodec[MyId] =\n    DbCodec[Long].biMap(MyId.apply, _.underlying)\n\ntransact(xa):\n  val id = MyId(123L)\n  sql\"UPDATE my_table SET x = true WHERE id = $id\".update.run()\n```\n\n### `DbCodec`: Typeclass for JDBC reading & writing\n\nDbCodec is a Typeclass for JDBC reading & writing.\n\nBuilt-in DbCodecs are provided for many types, including primitives, dates, Options, and Tuples. You can derive DbCodecs by adding `derives DbCodec` to your case class or enum.\n\n```scala\nval rs: ResultSet = ???\nval ints: Vector[Int] = DbCodec[Int].read(rs)\n\nval ps: PreparedStatement = ???\nDbCodec[Int].writeSingle(22, ps)\n```\n\n### Defining your own DbCodecs\n\nTo modify the JDBC mappings, implement a given DbCodec instance as you would for any Typeclass.\n\n### Future-Proof Queries\n\nA common problem when writing SQL queries is that they're difficult to refactor. When a column or table name changes you have to do a global find & replace. And if you miss a query, it's discovered at runtime.\n\nThere's also lots of repetition when writing SQL. Magnum's repositories help scrap the boilerplate, but writing `SELECT a, b, c, d, ...` for a large table quickly gets tiring.\n\nTo help with this, Magnum offers a `TableInfo` class to enable 'future-proof' queries. 
An important caveat is that these queries are harder to copy/paste into SQL editors like PgAdmin or DBeaver.\n\nHere are some examples:\n\n```scala\nimport com.augustnagro.magnum.*\n\ncase class UserCreator(firstName: String, age: Int) derives DbCodec\n\n@Table(PostgresDbType, SqlNameMapper.CamelToSnakeCase)\ncase class User(id: Long, firstName: String, age: Int) derives DbCodec\n\nobject User:\n  val Table = TableInfo[UserCreator, User, Long]\n\ndef allUsers(using DbCon): Vector[User] =\n  val u = User.Table\n  // equiv to \n  // SELECT id, first_name, age FROM user\n  sql\"SELECT ${u.all} FROM $u\".query[User].run()\n\ndef firstNamesForLast(lastName: String)(using DbCon): Vector[String] =\n  val u = User.Table\n  // equiv to\n  // SELECT DISTINCT first_name FROM user WHERE last_name = ?\n  sql\"\"\"\n    SELECT DISTINCT ${u.firstName} FROM $u\n    WHERE ${u.lastName} = $lastName\n  \"\"\".query[String].run()\n\ndef insertOrIgnore(creator: UserCreator)(using DbCon): Unit =\n  val u = User.Table\n  // equiv to\n  // INSERT OR IGNORE INTO user (first_name, age) VALUES (?, ?)\n  sql\"INSERT OR IGNORE INTO $u ${u.insertCols} VALUES ($creator)\".update.run()\n```\n\nIt's important that `val Table = TableInfo[X, Y, Z]` is not explicitly typed, otherwise its structural typing will be destroyed.\n\nIn the case of multiple joins, you can use `TableInfo.alias(String)` to prevent name conflicts:\n\n```scala\nval c = TableInfo[Car].alias(\"c\")\nval p = TableInfo[Person].alias(\"p\")\n\nsql\"\"\"\n   SELECT ${c.all}, ${p.firstName}\n   FROM $c\n   JOIN $p ON ${p.id} = ${c.personId}\n   \"\"\".query.run()\n```\n\n### Splicing Literal Values into Frags\n\nTo splice Strings directly into `sql` statements, you can interpolate `SqlLiteral` values. For example,\n\n```scala\nval table = SqlLiteral(\"beans\")\n  \nsql\"select * from $table\"\n```\n\nThis feature should be used sparingly and never with untrusted input. 
\n\n### Postgres Module\n\nThe Postgres Module adds support for [Geometric Types](https://www.postgresql.org/docs/current/datatype-geometric.html) and [Arrays](https://www.postgresql.org/docs/current/arrays.html). Postgres Arrays can be decoded into Scala List/Vector/IArray, etc; multi-dimensionality is also supported.\n\n```\n\"com.augustnagro\" %% \"magnumpg\" % \"1.3.0\"\n```\n\nExample: Insert into a table with a `point[]` type column.\n\nWith table:\n\n```sql\ncreate table my_geo (\n  id bigint primary key,\n  pnts point[] not null\n);\n```\n\n```scala\nimport org.postgresql.geometric.*\nimport com.augustnagro.magnum.*\nimport com.augustnagro.magnum.pg.PgCodec.given\n\n@Table(PostgresDbType)\ncase class MyGeo(@Id id: Long, pnts: IArray[PGpoint]) derives DbCodec\n\nval dataSource: javax.sql.DataSource = ???\nval xa = Transactor(dataSource)\n\nval myGeoRepo = Repo[MyGeo, MyGeo, Long]\n\ntransact(xa):\n  myGeoRepo.insert(MyGeo(1L, IArray(PGpoint(1, 1), PGpoint(2, 2))))\n```\n\nThe import of `PgCodec.given` is required to bring Geo/Array DbCodecs into scope.\n\n#### Arrays of Enums\n\nThe `pg` module supports arrays of simple (non-ADT) enums.\n\nIf you want to map an array of [Postgres enums](https://www.postgresql.org/docs/current/datatype-enum.html) to a sequence of Scala enums, use the following import when deriving the DbCodec:\n\n```scala\nimport com.augustnagro.magnum.pg.PgCodec.given\nimport com.augustnagro.magnum.pg.enums.PgEnumToScalaEnumSqlArrayCodec\n\n// in postgres: `create type Color as enum ('Red', 'Green', 'Blue');`\nenum Color derives DbCodec:\n  case Red, Green, Blue\n\n@Table(PostgresDbType)\ncase class Car(@Id id: Long, colors: Vector[Color]) derives DbCodec\n```\n\nIf instead your Postgres type is an array of varchar or text, use the following import:\n\n```scala\nimport com.augustnagro.magnum.pg.enums.PgStringToScalaEnumSqlArrayCodec\n```\n\n### Logging SQL queries\n\nIf you set the java.util Logging level to DEBUG, all SQL queries will be 
logged.\nSetting to TRACE will log SQL queries and their parameters.\n\n#### Logging Slow Queries\n\nYou can log slow queries by using the `Transactor` class in conjunction with `SqlLogger.logSlowQueries(FiniteDuration)`. See [Customizing Transactions](#customizing-transactions) for an example. You can also implement your own SqlLogger subclass as desired.\n\n## Integrations\n\n### ZIO\n\nMagnum provides a fine layer of integration with ZIO.    \nThe `magnum-zio` module provides an implementation of the `connect` and `transact` utils that return a ZIO effect.\n\nTo use the ZIO integration, add the following dependency:\n```scala\n\"com.augustnagro\" %% \"magnumzio\" % \"x.x.x\"\n```\n\nand import these utils in your code with:\n```scala\nimport com.augustnagro.magnum.magzio.*\n```\n\n## Motivation\n\nHistorically, database clients on the JVM fall into three categories.\n\n* Object Oriented Repositories (Spring-Data, Hibernate)\n* Functional DSLs (JOOQ, Slick, quill, zio-sql)\n* SQL String interpolators (Anorm, doobie, plain jdbc)\n\nMagnum is a Scala 3 library combining aspects of all three,\nproviding a typesafe and refactorable SQL interface,\nwhich can express all SQL expressions, on all JDBC-supported databases.\n\nLike in Zoolander (the movie), Magnum represents a 'new look' for Database access in Scala.\n\n## Feature List\n\n* Supports any database with a JDBC driver,\n  including Postgres, MySql, Oracle, ClickHouse, H2, and Sqlite\n* Efficient `sql\" \"` interpolator\n* Purely-functional API\n* Common queries (like insert, update, delete) generated at compile time\n* Difficult to hit [N+1 query problem](https://stackoverflow.com/questions/97197/what-is-the-n1-selects-problem-in-orm-object-relational-mapping)\n* Type-safe Transactions\n* Supports database-generated columns\n* Easy to use, Loom-ready API (no Futures or Effect Systems)\n* Easy to define entities. 
Easy to implement DB support & codecs for custom types.\n* Scales to complex SQL queries\n* Specifications for building dynamic queries, such as table filters with pagination\n* Supports high-performance [Seek pagination](https://blog.jooq.org/faster-sql-paging-with-jooq-using-the-seek-method/)\n* Performant batch-queries\n\n## Developing\nThe tests are written using TestContainers, which requires Docker be installed.\n\n## Talks and Blogs\n\n* Scala Days 2023: [slides](/Magnum-Slides-to-Share.pdf), [talk](https://www.youtube.com/watch?v=iKNRS5b1zAY)\n\n## Frequently Asked Questions\n\n#### Does Magnum support nested entities like:\n\n```scala\n@Table(H2DbType, SqlNameMapper.CamelToSnakeCase)\ncase class Company(\n  name: String,\n  address: Address,\n  ) derives DbCodec\n\ncase class Address(\n  street: String,\n  city: String,\n  zipCode: String,\n  country: String\n) derives DbCodec\n```\n\nNO; Magnum only supports deriving flat entity class structures. This keeps things simple and makes it obvious how the Scala entity class maps to the SQL table.\n\nWe may add support for SQL UDTs (user defined types) in the future; however at the time of writing, UDTs are not well-supported by JDBC drivers.\n\nYou could also express the above example using a foreign key to an Address table, like so:\n\n```scala\n@Table(H2DbType, SqlNameMapper.CamelToSnakeCase)\ncase class Company(\n  name: String,\n  addressId: AddressId,\n) derives DbCodec\n\nopaque type AddressId = Long\nobject AddressId:\n  def apply(id: Long): AddressId = id\n  extension (id: AddressId)\n    def underlying: Long = id\n  given DbCodec[AddressId] =\n    DbCodec[Long].biMap(AddressId.apply, _.underlying)\n\n@Table(H2DbType, SqlNameMapper.CamelToSnakeCase)\ncase class Address(\n  @Id id: AddressId,\n  street: String,\n  city: String,\n  zipCode: String,\n  country: String\n) derives DbCodec\n```\n\n#### UUID DbCodec doesn't work for my database\n\nSome databases directly support the UUID type; these include 
Postgres, Clickhouse, and H2. When using the built-in `DbCodec[UUID]`, defined in `DbCodec.scala`, serialization and deserialization of `java.util.UUID` will work as expected.\n\nOther databases like MySql, Oracle, and Sqlite, however, do not natively support UUID columns. Users have to choose an alternate datatype to store the UUID: most commonly `varchar(36)` or `binary(16)`. The JDBC drivers for these databases do not support direct serialization and deserialization of `java.util.UUID`, therefore the default `DbCodec[UUID]` will not be sufficient. Instead, import the appropriate codec from `com.augustnagro.magnum.UUIDCodec`. For example,\n\n```scala\nimport com.augustnagro.magnum.*\nimport com.augustnagro.magnum.UUIDCodec.VarCharUUIDCodec\nimport java.util.UUID\n\n@Table(MySqlDbType)\ncase class Person(@Id id: Long, name: String, tracking_id: Option[UUID]) derives DbCodec\n```\n\n## Todo\n* JSON / XML support\n* Support MSSql\n* Cats Effect & ZIO modules\n* Explicit Nulls support\n"
  },
  {
    "path": "build.sbt",
    "content": "ThisBuild / organization := \"com.augustnagro\"\nThisBuild / version := \"2.0.0-SNAPSHOT\"\nThisBuild / versionScheme := Some(\"early-semver\")\nThisBuild / scalaVersion := \"3.3.7\"\nThisBuild / scalacOptions ++= Seq(\"-deprecation\")\nThisBuild / homepage := Some(url(\"https://github.com/AugustNagro/magnum\"))\nThisBuild / licenses += (\n  \"Apache-2.0\",\n  url(\n    \"https://opensource.org/licenses/Apache-2.0\"\n  )\n)\nThisBuild / scmInfo := Some(\n  ScmInfo(\n    url(\"https://github.com/AugustNagro/magnum\"),\n    \"scm:git:git@github.com:augustnagro/magnum.git\",\n    Some(\"scm:git:git@github.com:augustnagro/magnum.git\")\n  )\n)\nThisBuild / developers := List(\n  Developer(\n    id = \"augustnagro@gmail.com\",\n    name = \"August Nagro\",\n    email = \"augustnagro@gmail.com\",\n    url = url(\"https://augustnagro.com\")\n  )\n)\nThisBuild / publishMavenStyle := true\nThisBuild / pomIncludeRepository := { _ => false }\nThisBuild / publishTo := {\n  val centralSnapshots =\n    \"https://central.sonatype.com/repository/maven-snapshots/\"\n  if (isSnapshot.value) Some(\"central-snapshots\" at centralSnapshots)\n  else localStaging.value\n}\nThisBuild / publish / skip := true\n\naddCommandAlias(\"fmt\", \"scalafmtAll\")\n\nval testcontainersVersion = \"0.44.1\"\nval circeVersion = \"0.14.10\"\nval munitVersion = \"1.1.0\"\nval postgresDriverVersion = \"42.7.4\"\n\nlazy val root = project\n  .in(file(\".\"))\n  .aggregate(magnum, magnumPg, magnumZio)\n\nlazy val magnum = project\n  .in(file(\"magnum\"))\n  .settings(\n    publish / skip := false,\n    libraryDependencies ++= Seq(\n      \"org.scalameta\" %% \"munit\" % munitVersion % Test,\n      \"com.dimafeng\" %% \"testcontainers-scala-munit\" % testcontainersVersion % Test,\n      \"com.dimafeng\" %% \"testcontainers-scala-postgresql\" % testcontainersVersion % Test,\n      \"org.postgresql\" % \"postgresql\" % postgresDriverVersion % Test,\n      \"com.dimafeng\" %% 
\"testcontainers-scala-mysql\" % testcontainersVersion % Test,\n      \"com.mysql\" % \"mysql-connector-j\" % \"9.0.0\" % Test,\n      \"com.h2database\" % \"h2\" % \"2.3.232\" % Test,\n      \"com.dimafeng\" %% \"testcontainers-scala-oracle-xe\" % testcontainersVersion % Test,\n      \"com.oracle.database.jdbc\" % \"ojdbc11\" % \"21.9.0.0\" % Test,\n      \"com.dimafeng\" %% \"testcontainers-scala-clickhouse\" % testcontainersVersion % Test,\n      \"com.clickhouse\" % \"clickhouse-jdbc\" % \"0.6.0\" % Test classifier \"http\",\n      \"org.xerial\" % \"sqlite-jdbc\" % \"3.46.1.3\" % Test\n    )\n  )\n\nlazy val magnumPg = project\n  .in(file(\"magnum-pg\"))\n  .dependsOn(magnum)\n  .settings(\n    publish / skip := false,\n    libraryDependencies ++= Seq(\n      \"org.postgresql\" % \"postgresql\" % postgresDriverVersion % \"provided\",\n      \"org.scalameta\" %% \"munit\" % munitVersion % Test,\n      \"com.dimafeng\" %% \"testcontainers-scala-munit\" % testcontainersVersion % Test,\n      \"com.dimafeng\" %% \"testcontainers-scala-postgresql\" % testcontainersVersion % Test,\n      \"io.circe\" %% \"circe-core\" % circeVersion % Test,\n      \"io.circe\" %% \"circe-parser\" % circeVersion % Test,\n      \"org.scala-lang.modules\" %% \"scala-xml\" % \"2.3.0\" % Test\n    )\n  )\n\nlazy val magnumZio = project\n  .in(file(\"magnum-zio\"))\n  .dependsOn(magnum)\n  .settings(\n    publish / skip := false,\n    libraryDependencies ++= Seq(\n      \"dev.zio\" %% \"zio\" % \"2.1.24\" % Provided,\n      \"org.scalameta\" %% \"munit\" % munitVersion % Test,\n      \"com.dimafeng\" %% \"testcontainers-scala-munit\" % testcontainersVersion % Test,\n      \"com.dimafeng\" %% \"testcontainers-scala-postgresql\" % testcontainersVersion % Test,\n      \"org.postgresql\" % \"postgresql\" % postgresDriverVersion % Test\n    )\n  )\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/BatchUpdateResult.scala",
    "content": "package com.augustnagro.magnum\n\nimport scala.util.boundary\n\n/** The total number of rows updated, or SuccessNoInfo if unknown. */\nenum BatchUpdateResult:\n  case Success(rowsUpdated: Long)\n  case SuccessNoInfo\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/ClickhouseDbType.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.{Connection, PreparedStatement, ResultSet, Statement}\nimport java.time.OffsetDateTime\nimport java.util.StringJoiner\nimport scala.collection.View\nimport scala.deriving.Mirror\nimport scala.reflect.ClassTag\nimport scala.util.{Failure, Success, Using, boundary}\n\nobject ClickhouseDbType extends DbType:\n  def buildRepoDefaults[EC, E, ID](\n      tableNameSql: String,\n      eElemNames: Seq[String],\n      eElemNamesSql: Seq[String],\n      eElemCodecs: Seq[DbCodec[?]],\n      ecElemNames: Seq[String],\n      ecElemNamesSql: Seq[String],\n      idIndex: Int\n  )(using\n      eCodec: DbCodec[E],\n      ecCodec: DbCodec[EC],\n      idCodec: DbCodec[ID],\n      eClassTag: ClassTag[E],\n      ecClassTag: ClassTag[EC],\n      idClassTag: ClassTag[ID]\n  ): RepoDefaults[EC, E, ID] =\n    require(\n      eClassTag.runtimeClass == ecClassTag.runtimeClass,\n      \"ClickHouse does not support generated keys, so EC must equal E\"\n    )\n    val idName = eElemNamesSql(idIndex)\n    val selectKeys = eElemNamesSql.mkString(\", \")\n    val ecInsertKeys = ecElemNamesSql.mkString(\"(\", \", \", \")\")\n\n    val countSql = s\"SELECT count(*) FROM $tableNameSql\"\n    val countQuery = Frag(countSql, Vector.empty, FragWriter.empty).query[Long]\n    val existsByIdSql =\n      s\"SELECT 1 FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val findAllSql = s\"SELECT $selectKeys FROM $tableNameSql\"\n    val findAllQuery = Frag(findAllSql, Vector.empty, FragWriter.empty).query[E]\n    val findByIdSql =\n      s\"SELECT $selectKeys FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val deleteByIdSql =\n      s\"DELETE FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val truncateSql = s\"TRUNCATE TABLE $tableNameSql\"\n    val truncateUpdate =\n      Frag(truncateSql, Vector.empty, FragWriter.empty).update\n    val insertSql =\n      s\"INSERT INTO $tableNameSql $ecInsertKeys VALUES 
(${ecCodec.queryRepr})\"\n\n    def idWriter(id: ID): FragWriter = (ps, pos) =>\n      idCodec.writeSingle(id, ps, pos)\n      pos + idCodec.cols.length\n\n    new RepoDefaults[EC, E, ID]:\n      def count(using con: DbCon): Long = countQuery.run().head\n\n      def existsById(id: ID)(using DbCon): Boolean =\n        Frag(existsByIdSql, IArray(id), idWriter(id))\n          .query[Int]\n          .run()\n          .nonEmpty\n\n      def findAll(using DbCon): Vector[E] = findAllQuery.run()\n\n      def findAll(spec: Spec[E])(using DbCon): Vector[E] =\n        SpecImpl.Default.findAll(spec, tableNameSql)\n\n      def findById(id: ID)(using DbCon): Option[E] =\n        Frag(findByIdSql, IArray(id), idWriter(id))\n          .query[E]\n          .run()\n          .headOption\n\n      def findAllById(ids: Iterable[ID])(using DbCon): Vector[E] =\n        throw UnsupportedOperationException()\n\n      def delete(entity: E)(using DbCon): Unit =\n        deleteById(\n          entity\n            .asInstanceOf[Product]\n            .productElement(idIndex)\n            .asInstanceOf[ID]\n        )\n\n      def deleteById(id: ID)(using DbCon): Unit =\n        Frag(deleteByIdSql, IArray(id), idWriter(id)).update\n          .run()\n\n      def truncate()(using DbCon): Unit =\n        truncateUpdate.run()\n\n      def deleteAll(entities: Iterable[E])(using DbCon): BatchUpdateResult =\n        deleteAllById(\n          entities.map(e =>\n            e.asInstanceOf[Product].productElement(idIndex).asInstanceOf[ID]\n          )\n        )\n\n      def deleteAllById(ids: Iterable[ID])(using\n          con: DbCon\n      ): BatchUpdateResult =\n        handleQuery(deleteByIdSql, ids):\n          Using(con.connection.prepareStatement(deleteByIdSql)): ps =>\n            idCodec.write(ids, ps)\n            timed(batchUpdateResult(ps.executeBatch()))\n\n      def insert(entityCreator: EC)(using con: DbCon): Unit =\n        handleQuery(insertSql, entityCreator):\n          
Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.writeSingle(entityCreator, ps)\n            timed(ps.executeUpdate())\n\n      def insertAll(entityCreators: Iterable[EC])(using con: DbCon): Unit =\n        handleQuery(insertSql, entityCreators):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.write(entityCreators, ps)\n            timed(batchUpdateResult(ps.executeBatch()))\n\n      def insertReturning(entityCreator: EC)(using con: DbCon): E =\n        handleQuery(insertSql, entityCreator):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.writeSingle(entityCreator, ps)\n            timed:\n              ps.executeUpdate()\n              entityCreator.asInstanceOf[E]\n\n      def insertAllReturning(\n          entityCreators: Iterable[EC]\n      )(using con: DbCon): Vector[E] =\n        handleQuery(insertSql, entityCreators):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.write(entityCreators, ps)\n            timed:\n              batchUpdateResult(ps.executeBatch())\n              entityCreators.toVector.asInstanceOf[Vector[E]]\n\n      def update(entity: E)(using DbCon): Unit =\n        throw UnsupportedOperationException()\n\n      def updateAll(entities: Iterable[E])(using\n          con: DbCon\n      ): BatchUpdateResult =\n        throw UnsupportedOperationException()\n\n    end new\n  end buildRepoDefaults\nend ClickhouseDbType\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/ColumnName.scala",
    "content": "package com.augustnagro.magnum\n\n/** Represents an entity column. Can be interpolated in sql\"\" expressions */\nclass ColumnName(\n    val scalaName: String,\n    val sqlName: String,\n    val queryRepr: String\n) extends SqlLiteral\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/ColumnNames.scala",
    "content": "package com.augustnagro.magnum\n\n/** A grouping of schema names, which may be interpolated in sql\"\" expressions.\n  * @param queryRepr\n  *   The query representation. For example, \"myColA, myColB\"\n  * @param columnNames\n  *   The column names.\n  */\nclass ColumnNames(val queryRepr: String, val columnNames: IArray[ColumnName])\n    extends SqlLiteral\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/DbCodec.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.net.URL\nimport java.sql.{JDBCType, PreparedStatement, ResultSet, Types}\nimport java.time.{\n  Instant,\n  LocalDate,\n  LocalDateTime,\n  LocalTime,\n  OffsetDateTime,\n  ZoneId,\n  ZoneOffset\n}\nimport java.util.UUID\nimport scala.annotation.implicitNotFound\nimport scala.deriving.Mirror\nimport scala.compiletime.{\n  constValue,\n  constValueTuple,\n  erasedValue,\n  error,\n  summonAll,\n  summonFrom,\n  summonInline\n}\nimport scala.quoted.*\nimport scala.reflect.ClassTag\nimport scala.util.boundary\n\n/** Typeclass for JDBC reading & writing.\n  */\ntrait DbCodec[E]:\n  self =>\n\n  /** Syntax used when querying the db. For example,\n    *\n    * DbCodec[Int].queryRepr == \"?\"\n    *\n    * DbCodec[(String, Boolean)].queryRepr = \"(?, ?)\"\n    *\n    * case class User(id: Long, name: String) derives DbCodec\n    * DbCodec[User].queryRepr = \"? ?\"\n    */\n  def queryRepr: String\n\n  /** The `java.sql.Types` constant for every \"?\" in `queryRepr`. For mapping\n    * database-specific types, Types.JAVA_OBJECT is recommended.\n    */\n  def cols: IArray[Int]\n\n  /** Read an E from the ResultSet starting at position `pos` and ending after\n    * reading `cols` number of columns. Make sure the ResultSet is in a valid\n    * state (ie, ResultSet::next has been called).\n    */\n  def readSingle(resultSet: ResultSet, pos: Int): E\n\n  /** Build an E from the ResultSet starting at position 1 and ending after\n    * reading `cols` number of columns. Make sure the ResultSet is in a valid\n    * state (ie, ResultSet::next has been called).\n    */\n  def readSingle(resultSet: ResultSet): E = readSingle(resultSet, 1)\n\n  /** Read an Option[E] from the ResultSet starting at position `pos` and ending\n    * after reading `cols` number of columns. 
Make sure the ResultSet is in a\n    * valid state (ie, ResultSet::next has been called).\n    */\n  def readSingleOption(resultSet: ResultSet, pos: Int): Option[E]\n\n  /** Build every row in the ResultSet into a sequence of E. The ResultSet\n    * should be in its initial position before calling (ie, ResultSet::next not\n    * called).\n    */\n  def read(resultSet: ResultSet): Vector[E] =\n    val res = Vector.newBuilder[E]\n    while resultSet.next() do res += readSingle(resultSet)\n    res.result()\n\n  /** Write the entity to the PreparedStatement starting at position `pos` */\n  def writeSingle(entity: E, ps: PreparedStatement, pos: Int): Unit\n\n  /** Write the entity to the resultSet starting at position 1 */\n  def writeSingle(entity: E, ps: PreparedStatement): Unit =\n    writeSingle(entity, ps, 1)\n\n  /** Writes multiple entities to the preparedStatement via\n    * PreparedStatement::addBatch\n    */\n  def write(entities: Iterable[E], ps: PreparedStatement): Unit =\n    for e <- entities do\n      writeSingle(e, ps)\n      ps.addBatch()\n\n  def biMap[E2](to: E => E2, from: E2 => E): DbCodec[E2] =\n    new DbCodec[E2]:\n      val cols: IArray[Int] = self.cols\n      def readSingle(rs: ResultSet, pos: Int): E2 =\n        to(self.readSingle(rs, pos))\n      def readSingleOption(rs: ResultSet, pos: Int): Option[E2] =\n        self.readSingleOption(rs, pos).map(to)\n      def writeSingle(e: E2, ps: PreparedStatement, pos: Int): Unit =\n        self.writeSingle(from(e), ps, pos)\n      def queryRepr: String = self.queryRepr\nend DbCodec\n\nobject DbCodec:\n\n  inline def apply[E](using codec: DbCodec[E]): DbCodec[E] = codec\n\n  given AnyCodec: DbCodec[Any] with\n    val cols: IArray[Int] = IArray(Types.JAVA_OBJECT)\n    def readSingle(rs: ResultSet, pos: Int): Any = rs.getObject(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[Any] =\n      Option(rs.getObject(pos))\n    def writeSingle(a: Any, ps: PreparedStatement, pos: Int): Unit =\n     
 ps.setObject(pos, a)\n    def queryRepr: String = \"?\"\n\n  given StringCodec: DbCodec[String] with\n    val cols: IArray[Int] = IArray(Types.VARCHAR)\n    def readSingle(rs: ResultSet, pos: Int): String = rs.getString(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[String] =\n      Option(rs.getString(pos))\n    def writeSingle(s: String, ps: PreparedStatement, pos: Int): Unit =\n      ps.setString(pos, s)\n    def queryRepr: String = \"?\"\n\n  given BooleanCodec: DbCodec[Boolean] with\n    val cols: IArray[Int] = IArray(Types.BOOLEAN)\n    def readSingle(rs: ResultSet, pos: Int): Boolean = rs.getBoolean(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[Boolean] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(b: Boolean, ps: PreparedStatement, pos: Int): Unit =\n      ps.setBoolean(pos, b)\n    def queryRepr: String = \"?\"\n\n  given ByteCodec: DbCodec[Byte] with\n    val cols: IArray[Int] = IArray(Types.TINYINT)\n    def readSingle(rs: ResultSet, pos: Int): Byte = rs.getByte(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[Byte] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(b: Byte, ps: PreparedStatement, pos: Int): Unit =\n      ps.setByte(pos, b)\n    def queryRepr: String = \"?\"\n\n  given ShortCodec: DbCodec[Short] with\n    val cols: IArray[Int] = IArray(Types.SMALLINT)\n    def readSingle(rs: ResultSet, pos: Int): Short = rs.getShort(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[Short] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(s: Short, ps: PreparedStatement, pos: Int): Unit =\n      ps.setShort(pos, s)\n    def queryRepr: String = \"?\"\n\n  given IntCodec: DbCodec[Int] with\n    val cols: IArray[Int] = IArray(Types.INTEGER)\n    def readSingle(rs: ResultSet, pos: Int): Int = rs.getInt(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[Int] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(i: Int, ps: PreparedStatement, pos: Int): Unit 
=\n      ps.setInt(pos, i)\n    def queryRepr: String = \"?\"\n\n  given LongCodec: DbCodec[Long] with\n    val cols: IArray[Int] = IArray(Types.BIGINT)\n    def readSingle(rs: ResultSet, pos: Int): Long = rs.getLong(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[Long] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(l: Long, ps: PreparedStatement, pos: Int): Unit =\n      ps.setLong(pos, l)\n    def queryRepr: String = \"?\"\n\n  given FloatCodec: DbCodec[Float] with\n    val cols: IArray[Int] = IArray(Types.REAL)\n    def readSingle(rs: ResultSet, pos: Int): Float = rs.getFloat(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[Float] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(f: Float, ps: PreparedStatement, pos: Int): Unit =\n      ps.setFloat(pos, f)\n    def queryRepr: String = \"?\"\n\n  given DoubleCodec: DbCodec[Double] with\n    val cols: IArray[Int] = IArray(Types.DOUBLE)\n    def readSingle(rs: ResultSet, pos: Int): Double = rs.getDouble(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[Double] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(d: Double, ps: PreparedStatement, pos: Int): Unit =\n      ps.setDouble(pos, d)\n    def queryRepr: String = \"?\"\n\n  given ByteArrayCodec: DbCodec[Array[Byte]] with\n    val cols: IArray[Int] = IArray(Types.BINARY)\n    def readSingle(rs: ResultSet, pos: Int): Array[Byte] = rs.getBytes(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[Array[Byte]] =\n      Option(rs.getBytes(pos))\n    def writeSingle(bytes: Array[Byte], ps: PreparedStatement, pos: Int): Unit =\n      ps.setBytes(pos, bytes)\n    def queryRepr: String = \"?\"\n\n  given ByteIArrayCodec: DbCodec[IArray[Byte]] with\n    val cols: IArray[Int] = IArray(Types.BINARY)\n    def readSingle(rs: ResultSet, pos: Int): IArray[Byte] =\n      IArray.unsafeFromArray(rs.getBytes(pos))\n    def readSingleOption(rs: ResultSet, pos: Int): Option[IArray[Byte]] =\n      
ByteArrayCodec.readSingleOption(rs, pos).map(IArray.unsafeFromArray)\n    def writeSingle(\n        bytes: IArray[Byte],\n        ps: PreparedStatement,\n        pos: Int\n    ): Unit =\n      ps.setBytes(pos, IArray.genericWrapArray(bytes).toArray)\n    def queryRepr: String = \"?\"\n\n  given SqlDateCodec: DbCodec[java.sql.Date] with\n    val cols: IArray[Int] = IArray(Types.DATE)\n    def readSingle(rs: ResultSet, pos: Int): java.sql.Date = rs.getDate(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[java.sql.Date] =\n      Option(rs.getDate(pos))\n    def writeSingle(\n        date: java.sql.Date,\n        ps: PreparedStatement,\n        pos: Int\n    ): Unit = ps.setDate(pos, date)\n    def queryRepr: String = \"?\"\n\n  given SqlTimeCodec: DbCodec[java.sql.Time] with\n    val cols: IArray[Int] = IArray(Types.TIME)\n    def readSingle(rs: ResultSet, pos: Int): java.sql.Time =\n      rs.getTime(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[java.sql.Time] =\n      Option(rs.getTime(pos))\n    def writeSingle(\n        time: java.sql.Time,\n        ps: PreparedStatement,\n        pos: Int\n    ): Unit = ps.setTime(pos, time)\n    def queryRepr: String = \"?\"\n\n  given SqlTimestampCodec: DbCodec[java.sql.Timestamp] with\n    val cols: IArray[Int] = IArray(Types.TIMESTAMP)\n    def readSingle(rs: ResultSet, pos: Int): java.sql.Timestamp =\n      rs.getTimestamp(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[java.sql.Timestamp] =\n      Option(rs.getTimestamp(pos))\n    def writeSingle(\n        t: java.sql.Timestamp,\n        ps: PreparedStatement,\n        pos: Int\n    ): Unit = ps.setTimestamp(pos, t)\n    def queryRepr: String = \"?\"\n\n  given OffsetDateTimeCodec: DbCodec[OffsetDateTime] with\n    val cols: IArray[Int] = IArray(Types.TIMESTAMP_WITH_TIMEZONE)\n    def readSingle(rs: ResultSet, pos: Int): OffsetDateTime =\n      rs.getObject(pos, classOf[OffsetDateTime])\n    def readSingleOption(rs: ResultSet, 
pos: Int): Option[OffsetDateTime] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(dt: OffsetDateTime, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, dt)\n    def queryRepr: String = \"?\"\n\n  given InstantCodec: DbCodec[Instant] =\n    OffsetDateTimeCodec.biMap(_.toInstant, _.atOffset(ZoneOffset.UTC))\n\n  given LocalDateCodec: DbCodec[LocalDate] with\n    val cols: IArray[Int] = IArray(Types.DATE)\n    def readSingle(rs: ResultSet, pos: Int): LocalDate =\n      rs.getObject(pos, classOf[LocalDate])\n    def readSingleOption(rs: ResultSet, pos: Int): Option[LocalDate] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(ld: LocalDate, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, ld)\n    def queryRepr: String = \"?\"\n\n  given LocalTimeCodec: DbCodec[LocalTime] with\n    val cols: IArray[Int] = IArray(Types.TIME)\n    def readSingle(rs: ResultSet, pos: Int): LocalTime =\n      rs.getObject(pos, classOf[LocalTime])\n    def readSingleOption(rs: ResultSet, pos: Int): Option[LocalTime] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(lt: LocalTime, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, lt)\n    def queryRepr: String = \"?\"\n\n  given LocalDateTimeCodec: DbCodec[LocalDateTime] with\n    val cols: IArray[Int] = IArray(Types.TIMESTAMP)\n    def readSingle(rs: ResultSet, pos: Int): LocalDateTime =\n      rs.getObject(pos, classOf[LocalDateTime])\n    def readSingleOption(rs: ResultSet, pos: Int): Option[LocalDateTime] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(ldt: LocalDateTime, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, ldt)\n    def queryRepr: String = \"?\"\n\n  given ZoneIdCodec: DbCodec[ZoneId] =\n    StringCodec.biMap(ZoneId.of, _.toString)\n\n  given SqlRefCodec: DbCodec[java.sql.Ref] with\n    val cols: IArray[Int] = IArray(Types.REF)\n    def readSingle(rs: ResultSet, pos: Int): java.sql.Ref = rs.getRef(pos)\n    def 
readSingleOption(rs: ResultSet, pos: Int): Option[java.sql.Ref] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(ref: java.sql.Ref, ps: PreparedStatement, pos: Int): Unit =\n      ps.setRef(pos, ref)\n    def queryRepr: String = \"?\"\n\n  given SqlBlobCodec: DbCodec[java.sql.Blob] with\n    val cols: IArray[Int] = IArray(Types.BLOB)\n    def readSingle(rs: ResultSet, pos: Int): java.sql.Blob = rs.getBlob(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[java.sql.Blob] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(b: java.sql.Blob, ps: PreparedStatement, pos: Int): Unit =\n      ps.setBlob(pos, b)\n    def queryRepr: String = \"?\"\n\n  given SqlClobCodec: DbCodec[java.sql.Clob] with\n    val cols: IArray[Int] = IArray(Types.CLOB)\n    def readSingle(rs: ResultSet, pos: Int): java.sql.Clob = rs.getClob(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[java.sql.Clob] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(c: java.sql.Clob, ps: PreparedStatement, pos: Int): Unit =\n      ps.setClob(pos, c)\n    def queryRepr: String = \"?\"\n\n  given URLCodec: DbCodec[URL] with\n    val cols: IArray[Int] = IArray(Types.VARCHAR)\n    def readSingle(rs: ResultSet, pos: Int): URL = rs.getURL(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[URL] =\n      Option(rs.getURL(pos))\n    def writeSingle(url: URL, ps: PreparedStatement, pos: Int): Unit =\n      ps.setURL(pos, url)\n    def queryRepr: String = \"?\"\n\n  given RowIdCodec: DbCodec[java.sql.RowId] with\n    val cols: IArray[Int] = IArray(Types.ROWID)\n    def readSingle(rs: ResultSet, pos: Int): java.sql.RowId = rs.getRowId(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[java.sql.RowId] =\n      Option(rs.getRowId(pos))\n    def writeSingle(\n        rowId: java.sql.RowId,\n        ps: PreparedStatement,\n        pos: Int\n    ): Unit =\n      ps.setRowId(pos, rowId)\n    def queryRepr: String = \"?\"\n\n  given SqlNClobCodec: 
DbCodec[java.sql.NClob] with\n    val cols: IArray[Int] = IArray(Types.NCLOB)\n    def readSingle(rs: ResultSet, pos: Int): java.sql.NClob = rs.getNClob(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[java.sql.NClob] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(nc: java.sql.NClob, ps: PreparedStatement, pos: Int): Unit =\n      ps.setNClob(pos, nc)\n    def queryRepr: String = \"?\"\n\n  given SqlXmlCodec: DbCodec[java.sql.SQLXML] with\n    val cols: IArray[Int] = IArray(Types.SQLXML)\n    def readSingle(rs: ResultSet, pos: Int): java.sql.SQLXML = rs.getSQLXML(pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[java.sql.SQLXML] =\n      readOptImpl(this, rs, pos)\n    def writeSingle(s: java.sql.SQLXML, ps: PreparedStatement, pos: Int): Unit =\n      ps.setSQLXML(pos, s)\n    def queryRepr: String = \"?\"\n\n  given JavaBigDecimalCodec: DbCodec[java.math.BigDecimal] with\n    val cols: IArray[Int] = IArray(Types.NUMERIC)\n    def readSingle(rs: ResultSet, pos: Int): java.math.BigDecimal =\n      rs.getBigDecimal(pos)\n    def readSingleOption(\n        rs: ResultSet,\n        pos: Int\n    ): Option[java.math.BigDecimal] =\n      Option(rs.getBigDecimal(pos))\n    def writeSingle(\n        bd: java.math.BigDecimal,\n        ps: PreparedStatement,\n        pos: Int\n    ): Unit =\n      ps.setBigDecimal(pos, bd)\n    def queryRepr: String = \"?\"\n\n  given ScalaBigDecimalCodec: DbCodec[scala.math.BigDecimal] with\n    val cols: IArray[Int] = IArray(Types.NUMERIC)\n    def readSingle(rs: ResultSet, pos: Int): scala.math.BigDecimal =\n      scala.math.BigDecimal(rs.getBigDecimal(pos))\n    def readSingleOption(rs: ResultSet, pos: Int): Option[BigDecimal] =\n      JavaBigDecimalCodec\n        .readSingleOption(rs, pos)\n        .map(scala.math.BigDecimal.apply)\n    def writeSingle(\n        bd: scala.math.BigDecimal,\n        ps: PreparedStatement,\n        pos: Int\n    ): Unit =\n      ps.setBigDecimal(pos, bd.underlying)\n   
 def queryRepr: String = \"?\"\n\n  given UUIDCodec: DbCodec[UUID] with\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = IArray(Types.OTHER)\n    def readSingle(rs: ResultSet, pos: Int): UUID =\n      rs.getObject(pos, classOf[UUID])\n    def readSingleOption(rs: ResultSet, pos: Int): Option[UUID] =\n      val res = rs.getObject(pos, classOf[UUID])\n      if rs.wasNull then None\n      else Some(res)\n    def writeSingle(entity: UUID, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, entity)\n\n  given OptionCodec[A](using codec: DbCodec[A]): DbCodec[Option[A]] with\n    def cols: IArray[Int] = codec.cols\n    def readSingle(rs: ResultSet, pos: Int): Option[A] =\n      codec.readSingleOption(rs, pos)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[Option[A]] =\n      Some(codec.readSingleOption(rs, pos))\n    def writeSingle(opt: Option[A], ps: PreparedStatement, pos: Int): Unit =\n      opt match\n        case Some(a) =>\n          codec.writeSingle(a, ps, pos)\n        case None =>\n          for i <- cols.indices do ps.setNull(pos + i, cols(i))\n    def queryRepr: String = codec.queryRepr\n\n  given SomeCodec[A](using codec: DbCodec[A]): DbCodec[Some[A]] with\n    def cols: IArray[Int] = codec.cols\n    def readSingle(rs: ResultSet, pos: Int): Some[A] =\n      Some(codec.readSingle(rs, pos))\n    def readSingleOption(rs: ResultSet, pos: Int): Option[Some[A]] =\n      codec.readSingleOption(rs, pos).map(Some.apply)\n    def writeSingle(s: Some[A], ps: PreparedStatement, pos: Int): Unit =\n      codec.writeSingle(s.get, ps, pos)\n    def queryRepr: String = codec.queryRepr\n\n  given Tuple2Codec[A, B](using\n      aCodec: DbCodec[A],\n      bCodec: DbCodec[B]\n  ): DbCodec[(A, B)] with\n    val cols: IArray[Int] = IArray.concat(aCodec.cols, bCodec.cols)\n    def readSingle(rs: ResultSet, pos: Int): (A, B) = (\n      aCodec.readSingle(rs, pos),\n      bCodec.readSingle(rs, pos + aCodec.cols.length)\n    )\n    def 
readSingleOption(rs: ResultSet, pos: Int): Option[(A, B)] =\n      val a = aCodec.readSingleOption(rs, pos)\n      val b = bCodec.readSingleOption(rs, pos + aCodec.cols.length)\n      (a, b) match\n        case (Some(a), Some(b)) => Some((a, b))\n        case _                  => None\n    def writeSingle(tup: (A, B), ps: PreparedStatement, pos: Int): Unit =\n      aCodec.writeSingle(tup._1, ps, pos)\n      bCodec.writeSingle(tup._2, ps, pos + aCodec.cols.length)\n    val queryRepr: String = s\"(${aCodec.queryRepr}, ${bCodec.queryRepr})\"\n\n  given Tuple3Codec[A, B, C](using\n      aCodec: DbCodec[A],\n      bCodec: DbCodec[B],\n      cCodec: DbCodec[C]\n  ): DbCodec[(A, B, C)] with\n    val cols: IArray[Int] =\n      IArray.concat(aCodec.cols, bCodec.cols, cCodec.cols)\n    def readSingle(rs: ResultSet, pos: Int): (A, B, C) =\n      var i = pos\n      val a = aCodec.readSingle(rs, i)\n      i += aCodec.cols.length\n      val b = bCodec.readSingle(rs, i)\n      i += bCodec.cols.length\n      val c = cCodec.readSingle(rs, i)\n      (a, b, c)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[(A, B, C)] =\n      var i = pos\n      val a = aCodec.readSingleOption(rs, i)\n      i += aCodec.cols.length\n      val b = bCodec.readSingleOption(rs, i)\n      i += bCodec.cols.length\n      val c = cCodec.readSingleOption(rs, i)\n      (a, b, c) match\n        case (Some(a), Some(b), Some(c)) => Some((a, b, c))\n        case _                           => None\n    def writeSingle(tup: (A, B, C), ps: PreparedStatement, pos: Int): Unit =\n      var i = pos\n      aCodec.writeSingle(tup._1, ps, i)\n      i += aCodec.cols.length\n      bCodec.writeSingle(tup._2, ps, i)\n      i += bCodec.cols.length\n      cCodec.writeSingle(tup._3, ps, i)\n    val queryRepr: String =\n      s\"(${aCodec.queryRepr}, ${bCodec.queryRepr}, ${cCodec.queryRepr})\"\n  end Tuple3Codec\n\n  given Tuple4Codec[A, B, C, D](using\n      aCodec: DbCodec[A],\n      bCodec: DbCodec[B],\n      cCodec: 
DbCodec[C],\n      dCodec: DbCodec[D]\n  ): DbCodec[(A, B, C, D)] with\n    val cols: IArray[Int] =\n      IArray.concat(aCodec.cols, bCodec.cols, cCodec.cols, dCodec.cols)\n    def readSingle(rs: ResultSet, pos: Int): (A, B, C, D) =\n      var i = pos\n      val a = aCodec.readSingle(rs, i)\n      i += aCodec.cols.length\n      val b = bCodec.readSingle(rs, i)\n      i += bCodec.cols.length\n      val c = cCodec.readSingle(rs, i)\n      i += cCodec.cols.length\n      val d = dCodec.readSingle(rs, i)\n      (a, b, c, d)\n    def readSingleOption(rs: ResultSet, pos: Int): Option[(A, B, C, D)] =\n      var i = pos\n      val a = aCodec.readSingleOption(rs, i)\n      i += aCodec.cols.length\n      val b = bCodec.readSingleOption(rs, i)\n      i += bCodec.cols.length\n      val c = cCodec.readSingleOption(rs, i)\n      i += cCodec.cols.length\n      val d = dCodec.readSingleOption(rs, i)\n      (a, b, c, d) match\n        case (Some(a), Some(b), Some(c), Some(d)) => Some((a, b, c, d))\n        case _                                    => None\n    def writeSingle(tup: (A, B, C, D), ps: PreparedStatement, pos: Int): Unit =\n      var i = pos\n      aCodec.writeSingle(tup._1, ps, i)\n      i += aCodec.cols.length\n      bCodec.writeSingle(tup._2, ps, i)\n      i += bCodec.cols.length\n      cCodec.writeSingle(tup._3, ps, i)\n      i += cCodec.cols.length\n      dCodec.writeSingle(tup._4, ps, i)\n    val queryRepr: String =\n      s\"(${aCodec.queryRepr}, ${bCodec.queryRepr}, ${cCodec.queryRepr}, ${dCodec.queryRepr})\"\n  end Tuple4Codec\n\n  inline given TupleNCodec[T <: Tuple]: DbCodec[T] = ${ tupleNCodecImpl[T] }\n\n  private def codecExprs[T <: Tuple: Type](\n      res: Vector[Expr[DbCodec[?]]] = Vector.empty\n  )(using Quotes): Expr[IArray[DbCodec[?]]] =\n    import quotes.reflect.*\n    Type.of[T] match\n      case '[EmptyTuple] => '{ IArray.from(${ Expr.ofSeq(res) }) }\n      case '[t *: ts] =>\n        val tCodec = Expr.summon[DbCodec[t]].getOrElse {\n          
report.errorAndAbort(s\"No DbCodec found for type ${Type.show[t]}\")\n        }\n        codecExprs[ts](res :+ tCodec)\n\n  def tupleNCodecImpl[T <: Tuple: Type](using Quotes): Expr[DbCodec[T]] =\n    import quotes.reflect.*\n    Type.of[T] match\n      case '[EmptyTuple] =>\n        report.errorAndAbort(\"Cannot derive DbCodec for EmptyTuple\")\n      case '[t *: ts] =>\n        val tCodecsExpr = codecExprs[t *: ts]()\n        '{\n          new DbCodec[t *: ts] {\n            val tCodecs = ${ tCodecsExpr }\n            val cols: IArray[Int] =\n              tCodecs.flatMap(codec => codec.cols)\n\n            def readSingle(rs: ResultSet, pos: Int): t *: ts =\n              val tupleSize = constValue[Tuple.Size[t *: ts]]\n              val result = Array.ofDim[Any](tupleSize)\n              var tupleIdx = 0\n              var psIdx = pos\n              while tupleIdx < tupleSize do\n                val codec = tCodecs(tupleIdx)\n                result(tupleIdx) = codec.readSingle(rs, psIdx)\n                tupleIdx += 1\n                psIdx += codec.cols.length\n              Tuple.fromArray(result).asInstanceOf[t *: ts]\n\n            def readSingleOption(rs: ResultSet, pos: Int): Option[t *: ts] =\n              boundary:\n                val tupleSize = constValue[Tuple.Size[t *: ts]]\n                val res = Array.ofDim[Any](tupleSize)\n                var tupleIdx = 0\n                var psIdx = pos\n                while tupleIdx < tupleSize do\n                  val codec = tCodecs(tupleIdx)\n                  codec.readSingleOption(rs, psIdx) match\n                    case Some(value) => res(tupleIdx) = value\n                    case None        => boundary.break(Option.empty)\n                  tupleIdx += 1\n                  psIdx += codec.cols.length\n                Some(Tuple.fromArray(res)).asInstanceOf[Option[t *: ts]]\n\n            def writeSingle(e: t *: ts, ps: PreparedStatement, pos: Int): Unit =\n              val tupleSize = 
constValue[Tuple.Size[t *: ts]]\n              var tupleIdx = 0\n              var psIdx = pos\n              while tupleIdx < tupleSize do\n                val codec = tCodecs(tupleIdx)\n                codec\n                  .asInstanceOf[DbCodec[Any]]\n                  .writeSingle(e.productElement(tupleIdx), ps, psIdx)\n                tupleIdx += 1\n                psIdx += codec.cols.length\n\n            val queryRepr: String =\n              tCodecs.map(_.queryRepr).mkString(\"(\", \", \", \")\")\n          }.asInstanceOf[DbCodec[T]]\n        }\n    end match\n  end tupleNCodecImpl\n\n  private inline def readOptImpl[A](\n      codec: DbCodec[A],\n      resultSet: ResultSet,\n      pos: Int\n  ): Option[A] =\n    val res = codec.readSingle(resultSet, pos)\n    if resultSet.wasNull then None\n    else Some(res)\n\n  inline def derived[E: Mirror.Of]: DbCodec[E] =\n    ${ dbCodecImpl[E] }\n\n  private def dbCodecImpl[E: Type](using Quotes): Expr[DbCodec[E]] =\n    import quotes.reflect.*\n    val mirror = Expr.summon[Mirror.Of[E]].getOrElse {\n      report.errorAndAbort(\n        \"Can only derive DbCodec for case classes, sealed traits or enums (products and sums).\"\n      )\n    }\n    mirror match\n      case '{\n            $mp: Mirror.ProductOf[E] {\n              type MirroredElemTypes = mets\n            }\n          } =>\n        val colsExpr = buildColsExpr[mets]()\n        '{\n          new DbCodec[E] {\n            val cols: IArray[Int] = $colsExpr\n            def readSingle(rs: ResultSet, pos: Int): E =\n              ${\n                productReadSingle[E, mets]('{ rs }, mp, Vector.empty, '{ pos })\n              }\n            def readSingleOption(rs: ResultSet, pos: Int): Option[E] =\n              ${\n                productReadOption[E, mets]('{ rs }, mp, Vector.empty, '{ pos })\n              }\n            def writeSingle(e: E, ps: PreparedStatement, pos: Int): Unit =\n              ${\n                productWriteSingle[E, mets]('{ e 
}, '{ ps }, '{ pos }, '{ 0 })\n              }\n            val queryRepr: String = ${ productQueryRepr[mets]() }\n          }\n        }\n      case '{\n            $ms: Mirror.SumOf[E] {\n              type MirroredElemTypes = mets\n              type MirroredElemLabels = mels\n              type MirroredLabel = mel\n            }\n          } =>\n        val nameMapExpr = DerivingUtil.buildSqlNameMapForEnum[E, mels, mets]\n        val melExpr = Expr(Type.valueOfConstant[mel].get.toString)\n        '{\n          new DbCodec[E] {\n            val nameMap: Seq[(String, E)] = $nameMapExpr\n            val cols: IArray[Int] = IArray(Types.VARCHAR)\n            def readSingle(rs: ResultSet, pos: Int): E =\n              val str = rs.getString(pos)\n              nameMap.find((name, _) => name == str) match\n                case Some((_, v)) => v\n                case None =>\n                  throw IllegalArgumentException(\n                    str + \" not convertible to \" + $melExpr\n                  )\n            def readSingleOption(rs: ResultSet, pos: Int): Option[E] =\n              Option(rs.getString(pos)).map(str =>\n                nameMap.find((name, _) => name == str) match\n                  case Some((_, v)) => v\n                  case None =>\n                    throw IllegalArgumentException(\n                      str + \" not convertible to \" + $melExpr\n                    )\n              )\n            def writeSingle(entity: E, ps: PreparedStatement, pos: Int): Unit =\n              nameMap.find((_, v) => v == entity) match\n                case Some((k, _)) => ps.setString(pos, k)\n                case None =>\n                  throw IllegalArgumentException(\n                    entity.toString + \" not convertible to \" + $melExpr\n                  )\n            def queryRepr: String = \"?\"\n          }\n        }\n    end match\n  end dbCodecImpl\n\n  private def productQueryRepr[Mets: Type](\n      elemReprs: Vector[Expr[String]] 
= Vector.empty\n  )(using Quotes): Expr[String] =\n    import quotes.reflect.*\n    Type.of[Mets] match\n      case '[met *: metTail] =>\n        Expr.summon[DbCodec[met]] match\n          case Some(codec) =>\n            productQueryRepr[metTail](elemReprs :+ '{ $codec.queryRepr })\n          case None =>\n            productQueryRepr[metTail](elemReprs :+ '{ \"?\" })\n      case '[EmptyTuple] =>\n        val seqExpr = Expr.ofSeq(elemReprs)\n        '{ $seqExpr.mkString(\", \") }\n\n  private def buildColsExpr[Mets: Type](\n      res: Vector[Expr[IArray[Int]]] = Vector.empty\n  )(using Quotes): Expr[IArray[Int]] =\n    import quotes.reflect.*\n    Type.of[Mets] match\n      case '[met *: metTail] =>\n        val metCodec = Expr.summon[DbCodec[met]].getOrElse {\n          val metType = TypeRepr.of[met].show\n          report.errorAndAbort(\n            s\"Cannot find a DbCodec instance for $metType! Provide one or derive it.\"\n          )\n        }\n        val newCols = '{ $metCodec.cols }\n        buildColsExpr[metTail](res :+ newCols)\n      case '[EmptyTuple] =>\n        '{\n          val iArrays: Seq[IArray[Int]] = ${ Expr.ofSeq(res) }\n          IArray.concat(iArrays*)\n        }\n\n  private def productReadSingle[E: Type, Mets: Type](\n      rs: Expr[ResultSet],\n      m: Expr[Mirror.ProductOf[E]],\n      res: Vector[Expr[Any]],\n      pos: Expr[Int]\n  )(using Quotes): Expr[E] =\n    import quotes.reflect.*\n    Type.of[Mets] match\n      case '[met *: metTail] =>\n        Expr.summon[DbCodec[met]] match\n          case Some(codecExpr) =>\n            '{\n              val posValue = $pos\n              val codec = $codecExpr\n              val metValue = codec.readSingle($rs, posValue)\n              val newPos = posValue + codec.cols.length\n              ${\n                productReadSingle[E, metTail](\n                  rs,\n                  m,\n                  res :+ '{ metValue },\n                  '{ newPos }\n                )\n              
}\n            }\n          case None =>\n            Expr.summon[ClassTag[met]] match\n              case Some(clsTagExpr) =>\n                report.info(\n                  s\"Could not find DbCodec for ${TypeRepr.of[met].show}. Defaulting to ResultSet::[get|set]Object\"\n                )\n                '{\n                  val posValue = $pos\n                  val metValue = $rs.getObject(\n                    posValue,\n                    $clsTagExpr.runtimeClass.asInstanceOf[Class[met]]\n                  )\n                  val newPos = posValue + 1\n                  ${\n                    productReadSingle[E, metTail](\n                      rs,\n                      m,\n                      res :+ '{ metValue },\n                      '{ newPos }\n                    )\n                  }\n                }\n              case None =>\n                report.errorAndAbort(\n                  \"Could not find DbCodec or ClassTag for ${TypeRepr.of[met].show}\"\n                )\n      case '[EmptyTuple] =>\n        '{\n          val product = ${ Expr.ofTupleFromSeq(res) }\n          $m.fromProduct(product)\n        }\n    end match\n  end productReadSingle\n\n  private def productReadOption[E: Type, Mets: Type](\n      rs: Expr[ResultSet],\n      m: Expr[Mirror.ProductOf[E]],\n      res: Vector[Expr[Any]],\n      pos: Expr[Int]\n  )(using Quotes): Expr[Option[E]] =\n    import quotes.reflect.*\n    Type.of[Mets] match\n      case '[met *: metTail] =>\n        Expr.summon[DbCodec[met]] match\n          case Some(codecExpr) =>\n            '{\n              val posValue = $pos\n              val codec = $codecExpr\n              codec.readSingleOption($rs, posValue) match\n                case Some(metValue) =>\n                  val newPos = posValue + codec.cols.length\n                  ${\n                    productReadOption[E, metTail](\n                      rs,\n                      m,\n                      res :+ '{ metValue },\n       
               '{ newPos }\n                    )\n                  }\n                case None => None\n            }\n          case None =>\n            Expr.summon[ClassTag[met]] match\n              case Some(clsTagExpr) =>\n                report.info(\n                  s\"Could not find DbCodec for ${TypeRepr.of[met].show}. Defaulting to ResultSet::[get|set]Object\"\n                )\n                '{\n                  val posValue = $pos\n                  val metValue = $rs.getObject(\n                    posValue,\n                    $clsTagExpr.runtimeClass.asInstanceOf[Class[met]]\n                  )\n                  if $rs.wasNull then None\n                  else\n                    val newPos = posValue + 1\n                    ${\n                      productReadOption[E, metTail](\n                        rs,\n                        m,\n                        res :+ '{ metValue },\n                        '{ newPos }\n                      )\n                    }\n                }\n              case None =>\n                report.errorAndAbort(\n                  \"Could not find DbCodec or ClassTag for ${TypeRepr.of[met].show}\"\n                )\n      case '[EmptyTuple] =>\n        '{\n          val product = ${ Expr.ofTupleFromSeq(res) }\n          Some($m.fromProduct(product))\n        }\n    end match\n  end productReadOption\n\n  private def productWriteSingle[E: Type, Mets: Type](\n      e: Expr[E],\n      ps: Expr[PreparedStatement],\n      pos: Expr[Int],\n      i: Expr[Int]\n  )(using Quotes): Expr[Unit] =\n    import quotes.reflect.*\n    Type.of[Mets] match\n      case '[met *: metTail] =>\n        Expr.summon[DbCodec[met]] match\n          case Some(codecExpr) =>\n            '{\n              val iValue = $i\n              val posValue = $pos\n              val metValue = $e\n                .asInstanceOf[Product]\n                .productElement(iValue)\n                .asInstanceOf[met]\n              val codec 
= $codecExpr\n              codec.writeSingle(metValue, $ps, posValue)\n              val newPos = posValue + $codecExpr.cols.length\n              val newI = iValue + 1\n              ${ productWriteSingle[E, metTail](e, ps, '{ newPos }, '{ newI }) }\n            }\n          case None =>\n            '{\n              val iValue = $i\n              val posValue = $pos\n              val metValue = $e\n                .asInstanceOf[Product]\n                .productElement(iValue)\n              $ps.setObject(posValue, metValue)\n              val newPos = posValue + 1\n              val newI = iValue + 1\n              ${ productWriteSingle[E, metTail](e, ps, '{ newPos }, '{ newI }) }\n            }\n      case '[EmptyTuple] => '{}\n    end match\n  end productWriteSingle\nend DbCodec\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/DbCon.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.Connection\n\n/** Simple wrapper around java.sql.Connection. See\n  * `com.augustnagro.magnum.connect` and `transact`\n  */\nclass DbCon private[magnum] (\n    val connection: Connection,\n    val sqlLogger: SqlLogger\n)\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/DbTx.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.Connection\nimport scala.util.Using\n\n/** Represents a transactional [[DbCon]]\n  */\nclass DbTx private[magnum] (connection: Connection, sqlLogger: SqlLogger)\n    extends DbCon(connection, sqlLogger)\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/DbType.scala",
    "content": "package com.augustnagro.magnum\n\nimport scala.reflect.ClassTag\nimport scala.deriving.Mirror\n\n/** Factory for Repo default methods */\ntrait DbType:\n  def buildRepoDefaults[EC, E, ID](\n      tableNameSql: String,\n      eElemNames: Seq[String],\n      eElemNamesSql: Seq[String],\n      eElemCodecs: Seq[DbCodec[?]],\n      ecElemNames: Seq[String],\n      ecElemNamesSql: Seq[String],\n      idIndex: Int\n  )(using\n      eCodec: DbCodec[E],\n      ecCodec: DbCodec[EC],\n      idCodec: DbCodec[ID],\n      eClassTag: ClassTag[E],\n      ecClassTag: ClassTag[EC],\n      idClassTag: ClassTag[ID]\n  ): RepoDefaults[EC, E, ID]\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/DerivingUtil.scala",
    "content": "package com.augustnagro.magnum\n\nimport scala.deriving.Mirror\nimport scala.compiletime.{\n  constValue,\n  constValueTuple,\n  erasedValue,\n  error,\n  summonFrom,\n  summonInline\n}\nimport scala.quoted.*\nimport scala.reflect.ClassTag\n\n/** Not useful for typical user code; provided to help implement custom DbCodecs\n  * and associated typeclasses\n  */\nobject DerivingUtil:\n  /** For a Simple (non-ADT) enum type E, constructs a sequence of mappings from\n    * sql string representation to enum value. For example,\n    *\n    * {{{\n    *   @Table(PostgresDbType, SqlNameMapper.CamelToUpperSnakeCase)\n    *   enum Color { case Red, @SqlName(\"greeeeeen\") Green, Blue }\n    * }}}\n    *\n    * Results in\n    *\n    * {{{\n    *   Seq(\"Red\" -> Color.Red, \"greeeeeen\" -> Color.Green, \"Blue\" -> Color.Blue)\n    * }}}\n    *\n    * Will produce a compile error if the enum is not simple (non-adt).\n    *\n    * @tparam E\n    *   the enum type, like Color\n    * @tparam Mels\n    *   enum Mirror's MirroredElemLabels\n    * @tparam Mets\n    *   enum Mirror's MirroredElemTypes\n    */\n  def buildSqlNameMapForEnum[\n      E: Type,\n      Mels: Type,\n      Mets: Type\n  ](using q: Quotes): Expr[Seq[(String, E)]] =\n    import q.reflect.*\n    val tableAnnot = TypeRepr.of[Table].typeSymbol\n    val defaultNameMapper: Expr[SqlNameMapper] =\n      TypeRepr\n        .of[E]\n        .typeSymbol\n        .getAnnotation(tableAnnot) match\n        case Some(term) =>\n          val tableExpr = term.asExprOf[Table]\n          '{ $tableExpr.nameMapper }\n        case None =>\n          '{ SqlNameMapper.SameCase }\n\n    val sumValueExprs: Vector[Expr[E]] = sumValues[E, Mets]()\n    val scalaNames = getScalaNames[Mels]()\n\n    val sqlNameAnnot = TypeRepr.of[SqlName].typeSymbol\n    val enumCaseSymbols = TypeRepr.of[E].typeSymbol.children\n\n    val sqlNameExprs: Vector[Expr[(String, E)]] = scalaNames\n      .zip(sumValueExprs)\n      .map((scalaName, 
sumExpr) =>\n        val nameAnnot = enumCaseSymbols\n          .find(sym => sym.name == scalaName && sym.hasAnnotation(sqlNameAnnot))\n          .flatMap(sym => sym.getAnnotation(sqlNameAnnot))\n        nameAnnot match\n          case Some(term) =>\n            val sqlNameExpr: Expr[SqlName] = term.asExprOf[SqlName]\n            '{ ($sqlNameExpr.name.toString, $sumExpr) }\n          case None =>\n            val scalaNameExpr = Expr(scalaName)\n            '{ ($defaultNameMapper.toColumnName($scalaNameExpr), $sumExpr) }\n      )\n    Expr.ofSeq(sqlNameExprs)\n  end buildSqlNameMapForEnum\n\n  private def getScalaNames[Mels: Type](res: Vector[String] = Vector.empty)(\n      using Quotes\n  ): Vector[String] =\n    import quotes.reflect.*\n    Type.of[Mels] match\n      case '[mel *: melTail] =>\n        val melString = Type.valueOfConstant[mel].get.toString\n        getScalaNames[melTail](res :+ melString)\n      case '[EmptyTuple] => res\n\n  private def sumValues[E: Type, Mets: Type](\n      res: Vector[Expr[E]] = Vector.empty\n  )(using Quotes): Vector[Expr[E]] =\n    import quotes.reflect.*\n    Type.of[Mets] match\n      case '[met *: metTail] =>\n        val expr = Expr.summon[Mirror.ProductOf[met]] match\n          case Some(m) if isSingleton[met] =>\n            '{ $m.fromProduct(EmptyTuple).asInstanceOf[E] }\n          case _ =>\n            report.errorAndAbort(\"Can only derive simple (non-adt) enums\")\n        sumValues[E, metTail](res :+ expr)\n      case '[EmptyTuple] => res\n\n  private def isSingleton[T: Type](using Quotes): Boolean =\n    import quotes.reflect.*\n    Expr.summon[Mirror.ProductOf[T]] match\n      case Some('{\n            $mp: Mirror.ProductOf[T] {\n              type MirroredElemTypes = mets\n            }\n          }) =>\n        tupleArity[mets]() == 0\n      case _ => false\n\n  private def tupleArity[T: Type](res: Int = 0)(using Quotes): Int =\n    import quotes.reflect.*\n    Type.of[T] match\n      case '[x *: xs]    => 
tupleArity[xs](res + 1)\n      case '[EmptyTuple] => res\n\n  /** Finds the first SqlName annotation on type T */\n  def sqlTableNameAnnot[T: Type](using Quotes): Option[Expr[SqlName]] =\n    import quotes.reflect._\n    val annot = TypeRepr.of[SqlName]\n    TypeRepr\n      .of[T]\n      .typeSymbol\n      .annotations\n      .find(_.tpe =:= annot)\n      .map(term => term.asExprOf[SqlName])\n\n  /** Finds the first Table annotation on type T */\n  def tableAnnot[T: Type](using Quotes): Option[Expr[Table]] =\n    import quotes.reflect.*\n    val annot = TypeRepr.of[Table]\n    TypeRepr\n      .of[T]\n      .typeSymbol\n      .annotations\n      .find(_.tpe =:= annot)\n      .map(term => term.asExprOf[Table])\n\nend DerivingUtil\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/Frag.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.lang.System.Logger.Level\nimport java.sql.{PreparedStatement, ResultSet, Statement}\nimport scala.collection.immutable.ArraySeq\nimport scala.util.{Failure, Success, Using}\n\n/** Sql fragment */\nclass Frag(\n    val sqlString: String,\n    val params: Seq[Any],\n    val writer: FragWriter\n):\n  def query[E](using reader: DbCodec[E]): Query[E] = Query(this, reader)\n\n  def update: Update = Update(this)\n\n  /** For databases like Postgres that support RETURNING statements via\n    * `getResultSet`\n    */\n  def returning[E](using reader: DbCodec[E]): Returning[E] =\n    Returning(this, reader, Vector.empty)\n\n  /** For databases that support RETURNING statements via `getGeneratedKeys`\n    */\n  def returningKeys[E](colName: String, xs: String*)(using\n      reader: DbCodec[E]\n  ): Returning[E] =\n    Returning(this, reader, colName +: xs)\n\n  /** For databases that support RETURNING statements via `getGeneratedKeys`\n    */\n  def returningKeys[E](colName: ColumnName, xs: ColumnName*)(using\n      reader: DbCodec[E]\n  ): Returning[E] =\n    Returning(this, reader, (colName +: xs).map(_.queryRepr))\n\n  /** For databases that support RETURNING statements via `getGeneratedKeys`\n    */\n  def returningKeys[E](colNames: ColumnNames)(using\n      reader: DbCodec[E]\n  ): Returning[E] =\n    Returning(this, reader, colNames.columnNames.map(_.queryRepr))\n\n  /** Strips leading whitespace characters followed by a specified char from the\n    * beginning of each line in this {@link Frag} .\n    *\n    * This method is useful when you want to format SQL strings in a more\n    * readable multi-line way within your code.\n    *\n    * @param marginChar\n    *   the character that indicates the margin.\n    * @return\n    *   a new {@link Frag} instance with the modified `sqlString`.\n    */\n  def stripMargin(marginChar: Char): Frag =\n    Frag(sqlString.stripMargin(marginChar), params, writer)\n\n  /** 
Strips leading whitespace characters followed by a vertical bar (`|`) from\n    * the beginning of each line in this {@link Frag} .\n    *\n    * This method is useful when you want to format SQL strings in a more\n    * readable multi-line way within your code.\n    *\n    * @return\n    *   a new {@link Frag} instance with the modified `sqlString`.\n    */\n  def stripMargin: Frag = stripMargin('|')\n\nend Frag\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/FragWriter.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.PreparedStatement\n\ntrait FragWriter:\n  /** Writes a Frag's values to `ps`, staring at postion `pos`. Returns the new\n    * position.\n    */\n  def write(ps: PreparedStatement, pos: Int): Int\n\nobject FragWriter:\n  val empty: FragWriter = (_, pos) => pos\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/H2DbType.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.{Connection, JDBCType, PreparedStatement, ResultSet, Statement}\nimport java.time.OffsetDateTime\nimport scala.collection.View\nimport scala.deriving.Mirror\nimport scala.reflect.ClassTag\nimport scala.util.{Failure, Success, Using}\n\nobject H2DbType extends DbType:\n\n  def buildRepoDefaults[EC, E, ID](\n      tableNameSql: String,\n      eElemNames: Seq[String],\n      eElemNamesSql: Seq[String],\n      eElemCodecs: Seq[DbCodec[?]],\n      ecElemNames: Seq[String],\n      ecElemNamesSql: Seq[String],\n      idIndex: Int\n  )(using\n      eCodec: DbCodec[E],\n      ecCodec: DbCodec[EC],\n      idCodec: DbCodec[ID],\n      eClassTag: ClassTag[E],\n      ecClassTag: ClassTag[EC],\n      idClassTag: ClassTag[ID]\n  ): RepoDefaults[EC, E, ID] =\n    val idName = eElemNamesSql(idIndex)\n    val selectKeys = eElemNamesSql.mkString(\", \")\n    val ecInsertKeys = ecElemNamesSql.mkString(\"(\", \", \", \")\")\n\n    val updateKeys: String = eElemNamesSql\n      .lazyZip(eElemCodecs)\n      .map((sqlName, codec) => sqlName + \" = \" + codec.queryRepr)\n      .patch(idIndex, Seq.empty, 1)\n      .mkString(\", \")\n\n    val updateCodecs = eElemCodecs\n      .patch(idIndex, Seq.empty, 1)\n      .appended(idCodec)\n      .asInstanceOf[Seq[DbCodec[Any]]]\n\n    val insertGenKeys: Array[String] = Array.from(eElemNamesSql)\n\n    val countSql = s\"SELECT count(*) FROM $tableNameSql\"\n    val countQuery = Frag(countSql, Vector.empty, FragWriter.empty).query[Long]\n    val existsByIdSql =\n      s\"SELECT 1 FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val findAllSql = s\"SELECT * FROM $tableNameSql\"\n    val findAllQuery = Frag(findAllSql, Vector.empty, FragWriter.empty).query[E]\n    val findByIdSql =\n      s\"SELECT * FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val findAllByIdSql = s\"SELECT * FROM $tableNameSql WHERE $idName = ANY(?)\"\n    val deleteByIdSql =\n      
s\"DELETE FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val truncateSql = s\"TRUNCATE TABLE $tableNameSql\"\n    val truncateUpdate =\n      Frag(truncateSql, Vector.empty, FragWriter.empty).update\n    val insertSql =\n      s\"INSERT INTO $tableNameSql $ecInsertKeys VALUES (${ecCodec.queryRepr})\"\n    val updateSql =\n      s\"UPDATE $tableNameSql SET $updateKeys WHERE $idName = ${idCodec.queryRepr}\"\n\n    val compositeId = idCodec.cols.distinct.size != 1\n    val idFirstTypeName = JDBCType.valueOf(idCodec.cols.head).getName\n\n    def idWriter(id: ID): FragWriter = (ps, pos) =>\n      idCodec.writeSingle(id, ps, pos)\n      pos + idCodec.cols.length\n\n    new RepoDefaults[EC, E, ID]:\n      def count(using con: DbCon): Long = countQuery.run().head\n\n      def existsById(id: ID)(using DbCon): Boolean =\n        Frag(existsByIdSql, IArray(id), idWriter(id))\n          .query[Int]\n          .run()\n          .nonEmpty\n\n      def findAll(using DbCon): Vector[E] = findAllQuery.run()\n\n      def findAll(spec: Spec[E])(using DbCon): Vector[E] =\n        SpecImpl.Default.findAll(spec, tableNameSql)\n\n      def findById(id: ID)(using DbCon): Option[E] =\n        Frag(findByIdSql, IArray(id), idWriter(id))\n          .query[E]\n          .run()\n          .headOption\n\n      def findAllById(ids: Iterable[ID])(using DbCon): Vector[E] =\n        if compositeId then\n          throw UnsupportedOperationException(\n            \"Composite ids unsupported for findAllById.\"\n          )\n        val idsArray = Array.from[Any](ids)\n        Frag(\n          findAllByIdSql,\n          IArray(idsArray),\n          (ps, pos) =>\n            val sqlArray =\n              ps.getConnection.createArrayOf(idFirstTypeName, idsArray)\n            ps.setArray(pos, sqlArray)\n            pos + 1\n        ).query[E].run()\n//        // h2 doesn't support setObject(..) 
with primitive arrays,\n//        // so we need to convert to Array[Object]\n//        val builder = Array.newBuilder[Object]\n//        if ids.knownSize > -1 then builder.sizeHint(ids.knownSize)\n//        for id <- ids do builder += id.asInstanceOf[Object]\n//        Sql(findAllByIdSql, Vector(builder.result())).run\n\n      def delete(entity: E)(using DbCon): Unit =\n        deleteById(\n          entity\n            .asInstanceOf[Product]\n            .productElement(idIndex)\n            .asInstanceOf[ID]\n        )\n\n      def deleteById(id: ID)(using DbCon): Unit =\n        Frag(deleteByIdSql, IArray(id), idWriter(id)).update.run()\n\n      def truncate()(using DbCon): Unit = truncateUpdate.run()\n\n      def deleteAll(entities: Iterable[E])(using DbCon): BatchUpdateResult =\n        deleteAllById(\n          entities.map(e =>\n            e.asInstanceOf[Product].productElement(idIndex).asInstanceOf[ID]\n          )\n        )\n\n      def deleteAllById(ids: Iterable[ID])(using\n          con: DbCon\n      ): BatchUpdateResult =\n        handleQuery(deleteByIdSql, ids):\n          Using(con.connection.prepareStatement(deleteByIdSql)): ps =>\n            idCodec.write(ids, ps)\n            timed(batchUpdateResult(ps.executeBatch()))\n\n      def insert(entityCreator: EC)(using con: DbCon): Unit =\n        handleQuery(insertSql, entityCreator):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.writeSingle(entityCreator, ps)\n            timed(ps.executeUpdate())\n\n      def insertAll(entityCreators: Iterable[EC])(using con: DbCon): Unit =\n        handleQuery(insertSql, entityCreators):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.write(entityCreators, ps)\n            timed(batchUpdateResult(ps.executeBatch()))\n\n      def insertReturning(entityCreator: EC)(using con: DbCon): E =\n        handleQuery(insertSql, entityCreator):\n          Using.Manager: use =>\n            
val ps =\n              use(con.connection.prepareStatement(insertSql, insertGenKeys))\n            ecCodec.writeSingle(entityCreator, ps)\n            timed:\n              ps.executeUpdate()\n              val rs = use(ps.getGeneratedKeys)\n              rs.next()\n              eCodec.readSingle(rs)\n\n      def insertAllReturning(\n          entityCreators: Iterable[EC]\n      )(using con: DbCon): Vector[E] =\n        handleQuery(insertSql, entityCreators):\n          Using.Manager: use =>\n            val ps =\n              use(con.connection.prepareStatement(insertSql, insertGenKeys))\n            ecCodec.write(entityCreators, ps)\n            timed:\n              batchUpdateResult(ps.executeBatch())\n              val rs = use(ps.getGeneratedKeys)\n              eCodec.read(rs)\n\n      def update(entity: E)(using con: DbCon): Unit =\n        handleQuery(updateSql, entity):\n          Using(con.connection.prepareStatement(updateSql)): ps =>\n            val entityValues: Vector[Any] = entity\n              .asInstanceOf[Product]\n              .productIterator\n              .toVector\n            // put ID at the end\n            val updateValues = entityValues\n              .patch(idIndex, Vector.empty, 1)\n              .appended(entityValues(idIndex))\n\n            var pos = 1\n            for (field, codec) <- updateValues.lazyZip(updateCodecs) do\n              codec.writeSingle(field, ps, pos)\n              pos += codec.cols.length\n            timed(ps.executeUpdate())\n\n      def updateAll(entities: Iterable[E])(using\n          con: DbCon\n      ): BatchUpdateResult =\n        handleQuery(updateSql, entities):\n          Using(con.connection.prepareStatement(updateSql)): ps =>\n            for entity <- entities do\n              val entityValues: Vector[Any] = entity\n                .asInstanceOf[Product]\n                .productIterator\n                .toVector\n              // put ID at the end\n              val updateValues = 
entityValues\n                .patch(idIndex, Vector.empty, 1)\n                .appended(entityValues(idIndex))\n\n              var pos = 1\n              for (field, codec) <- updateValues.lazyZip(updateCodecs) do\n                codec.writeSingle(field, ps, pos)\n                pos += codec.cols.length\n              ps.addBatch()\n\n            timed(batchUpdateResult(ps.executeBatch()))\n\n    end new\n  end buildRepoDefaults\nend H2DbType\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/Id.scala",
    "content": "package com.augustnagro.magnum\n\nimport scala.annotation.StaticAnnotation\n\nclass Id extends StaticAnnotation\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/ImmutableRepo.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.ResultSet\nimport javax.sql.DataSource\nimport scala.util.{Try, Using}\n\n/** Repository supporting read-only queries. When entity `E` does not have an\n  * id, use `Null` for the `Id` type.\n  * @tparam E\n  *   database entity class\n  * @tparam ID\n  *   id type of E\n  */\nopen class ImmutableRepo[E, ID](using defaults: RepoDefaults[?, E, ID]):\n\n  /** Count of all entities */\n  def count(using DbCon): Long = defaults.count\n\n  /** Returns true if an E exists with the given id */\n  def existsById(id: ID)(using DbCon): Boolean = defaults.existsById(id)\n\n  /** Returns all entity values */\n  def findAll(using DbCon): Vector[E] = defaults.findAll\n\n  /** Find all entities matching the specification. See the scaladoc of [[Spec]]\n    * for more details\n    */\n  def findAll(spec: Spec[E])(using DbCon): Vector[E] = defaults.findAll(spec)\n\n  /** Returns Some(entity) if a matching E is found */\n  def findById(id: ID)(using DbCon): Option[E] = defaults.findById(id)\n\n  /** Find all entities having ids in the Iterable. If an Id is not found, no\n    * error is thrown.\n    */\n  def findAllById(ids: Iterable[ID])(using DbCon): Vector[E] =\n    defaults.findAllById(ids)\n\nend ImmutableRepo\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/MySqlDbType.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.{Connection, PreparedStatement, ResultSet, Statement}\nimport java.time.OffsetDateTime\nimport scala.collection.View\nimport scala.deriving.Mirror\nimport scala.reflect.ClassTag\nimport scala.util.{Failure, Success, Using}\n\nobject MySqlDbType extends DbType:\n\n  private val specImpl = new SpecImpl:\n    override def sortSql(sort: Sort): String =\n      val column = sort.column\n      val nullSort = sort.nullOrder match\n        case NullOrder.Default => \"\"\n        case NullOrder.First   => s\"$column IS NOT NULL, \"\n        case NullOrder.Last    => s\"$column IS NULL, \"\n        case _                 => throw UnsupportedOperationException()\n      val dir = sort.direction match\n        case SortOrder.Default => \"\"\n        case SortOrder.Asc     => \" ASC\"\n        case SortOrder.Desc    => \" DESC\"\n        case _                 => throw UnsupportedOperationException()\n      nullSort + column + dir\n\n    override def offsetLimitSql(\n        offset: Option[Long],\n        limit: Option[Int]\n    ): Option[String] =\n      (offset, limit) match\n        case (Some(o), Some(l)) => Some(s\"LIMIT $o, $l\")\n        case (Some(o), None)    => Some(s\"LIMIT $o, ${Long.MaxValue}\")\n        case (None, Some(l))    => Some(s\"LIMIT $l\")\n        case (None, None)       => None\n\n  def buildRepoDefaults[EC, E, ID](\n      tableNameSql: String,\n      eElemNames: Seq[String],\n      eElemNamesSql: Seq[String],\n      eElemCodecs: Seq[DbCodec[?]],\n      ecElemNames: Seq[String],\n      ecElemNamesSql: Seq[String],\n      idIndex: Int\n  )(using\n      eCodec: DbCodec[E],\n      ecCodec: DbCodec[EC],\n      idCodec: DbCodec[ID],\n      eClassTag: ClassTag[E],\n      ecClassTag: ClassTag[EC],\n      idClassTag: ClassTag[ID]\n  ): RepoDefaults[EC, E, ID] =\n    val idName = eElemNamesSql(idIndex)\n    val selectKeys = eElemNamesSql.mkString(\", \")\n    val ecInsertKeys = 
ecElemNamesSql.mkString(\"(\", \", \", \")\")\n\n    val insertGenKeys = Array(idName)\n\n    val updateKeys: String = eElemNamesSql\n      .lazyZip(eElemCodecs)\n      .map((sqlName, codec) => sqlName + \" = \" + codec.queryRepr)\n      .patch(idIndex, Seq.empty, 1)\n      .mkString(\", \")\n\n    val updateCodecs = eElemCodecs\n      .patch(idIndex, Seq.empty, 1)\n      .appended(idCodec)\n      .asInstanceOf[Seq[DbCodec[Any]]]\n\n    val countSql = s\"SELECT count(*) FROM $tableNameSql\"\n    val countQuery = Frag(countSql, Vector.empty, FragWriter.empty).query[Long]\n    val existsByIdSql =\n      s\"SELECT 1 FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val findAllSql = s\"SELECT * FROM $tableNameSql\"\n    val findAllQuery = Frag(findAllSql, Vector.empty, FragWriter.empty).query[E]\n    val findByIdSql =\n      s\"SELECT * FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val deleteByIdSql =\n      s\"DELETE FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val truncateSql = s\"TRUNCATE TABLE $tableNameSql\"\n    val truncateUpdate =\n      Frag(truncateSql, Vector.empty, FragWriter.empty).update\n    val insertSql =\n      s\"INSERT INTO $tableNameSql $ecInsertKeys VALUES (${ecCodec.queryRepr})\"\n    val updateSql =\n      s\"UPDATE $tableNameSql SET $updateKeys WHERE $idName = ${idCodec.queryRepr}\"\n    val insertAndFindByIdSql = insertSql + \"\\n\" + findByIdSql\n\n    def idWriter(id: ID): FragWriter = (ps, pos) =>\n      idCodec.writeSingle(id, ps, pos)\n      pos + idCodec.cols.length\n\n    new RepoDefaults[EC, E, ID]:\n      def count(using con: DbCon): Long = countQuery.run().head\n\n      def existsById(id: ID)(using DbCon): Boolean =\n        Frag(existsByIdSql, IArray(id), idWriter(id))\n          .query[Int]\n          .run()\n          .nonEmpty\n\n      def findAll(using DbCon): Vector[E] = findAllQuery.run()\n\n      def findAll(spec: Spec[E])(using DbCon): Vector[E] =\n        
specImpl.findAll(spec, tableNameSql)\n\n      def findById(id: ID)(using DbCon): Option[E] =\n        Frag(findByIdSql, IArray(id), idWriter(id))\n          .query[E]\n          .run()\n          .headOption\n\n      def findAllById(ids: Iterable[ID])(using DbCon): Vector[E] =\n        throw UnsupportedOperationException(\n          \"MySql does not support 'ANY' keyword, and does not support long IN parameter lists. Use findById in a loop instead.\"\n        )\n\n      def delete(entity: E)(using DbCon): Unit =\n        deleteById(\n          entity\n            .asInstanceOf[Product]\n            .productElement(idIndex)\n            .asInstanceOf[ID]\n        )\n\n      def deleteById(id: ID)(using DbCon): Unit =\n        Frag(deleteByIdSql, IArray(id), idWriter(id)).update\n          .run()\n\n      def truncate()(using DbCon): Unit = truncateUpdate.run()\n\n      def deleteAll(entities: Iterable[E])(using DbCon): BatchUpdateResult =\n        deleteAllById(\n          entities.map(e =>\n            e.asInstanceOf[Product].productElement(idIndex).asInstanceOf[ID]\n          )\n        )\n\n      def deleteAllById(ids: Iterable[ID])(using\n          con: DbCon\n      ): BatchUpdateResult =\n        handleQuery(deleteByIdSql, ids):\n          Using(con.connection.prepareStatement(deleteByIdSql)): ps =>\n            idCodec.write(ids, ps)\n            timed(batchUpdateResult(ps.executeBatch()))\n\n      def insert(entityCreator: EC)(using con: DbCon): Unit =\n        handleQuery(insertSql, entityCreator):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.writeSingle(entityCreator, ps)\n            timed(ps.executeUpdate())\n\n      def insertAll(entityCreators: Iterable[EC])(using con: DbCon): Unit =\n        handleQuery(insertSql, entityCreators):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.write(entityCreators, ps)\n            timed(batchUpdateResult(ps.executeBatch()))\n\n      
def insertReturning(entityCreator: EC)(using con: DbCon): E =\n        // unfortunately, mysql only will return auto_incremented keys.\n        // it doesn't return default columns, and adding other columns to\n        // the insertGenKeys array doesn't change this behavior.\n        throw UnsupportedOperationException()\n\n      def insertAllReturning(\n          entityCreators: Iterable[EC]\n      )(using con: DbCon): Vector[E] =\n        throw UnsupportedOperationException()\n\n      def update(entity: E)(using con: DbCon): Unit =\n        handleQuery(updateSql, entity):\n          Using(con.connection.prepareStatement(updateSql)): ps =>\n            val entityValues: Vector[Any] = entity\n              .asInstanceOf[Product]\n              .productIterator\n              .toVector\n            // put ID at the end\n            val updateValues = entityValues\n              .patch(idIndex, Vector.empty, 1)\n              .appended(entityValues(idIndex))\n\n            var pos = 1\n            for (field, codec) <- updateValues.lazyZip(updateCodecs) do\n              codec.writeSingle(field, ps, pos)\n              pos += codec.cols.length\n            timed(ps.executeUpdate())\n\n      def updateAll(entities: Iterable[E])(using\n          con: DbCon\n      ): BatchUpdateResult =\n        handleQuery(updateSql, entities):\n          Using(con.connection.prepareStatement(updateSql)): ps =>\n            for entity <- entities do\n              val entityValues: Vector[Any] = entity\n                .asInstanceOf[Product]\n                .productIterator\n                .toVector\n              // put ID at the end\n              val updateValues = entityValues\n                .patch(idIndex, Vector.empty, 1)\n                .appended(entityValues(idIndex))\n\n              var pos = 1\n              for (field, codec) <- updateValues.lazyZip(updateCodecs) do\n                codec.writeSingle(field, ps, pos)\n                pos += codec.cols.length\n           
   ps.addBatch()\n\n            timed(batchUpdateResult(ps.executeBatch()))\n    end new\n  end buildRepoDefaults\nend MySqlDbType\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/NullOrder.scala",
    "content": "package com.augustnagro.magnum\n\ntrait NullOrder\n\nobject NullOrder:\n  case object Default extends NullOrder\n  case object First extends NullOrder\n  case object Last extends NullOrder\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/OracleDbType.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.{Connection, PreparedStatement, ResultSet, Statement}\nimport java.time.OffsetDateTime\nimport scala.collection.View\nimport scala.deriving.Mirror\nimport scala.reflect.ClassTag\nimport scala.util.{Failure, Success, Using}\n\nobject OracleDbType extends DbType:\n\n  private val specImpl = new SpecImpl:\n    override def offsetLimitSql(\n        offset: Option[Long],\n        limit: Option[Int]\n    ): Option[String] =\n      (offset, limit) match\n        case (Some(o), Some(l)) =>\n          Some(s\"OFFSET $o ROWS FETCH NEXT $l ROWS ONLY\")\n        case (Some(o), None) => Some(s\"OFFSET $o ROWS\")\n        case (None, Some(l)) => Some(s\"FETCH NEXT $l ROWS ONLY\")\n        case (None, None)    => None\n\n  def buildRepoDefaults[EC, E, ID](\n      tableNameSql: String,\n      eElemNames: Seq[String],\n      eElemNamesSql: Seq[String],\n      eElemCodecs: Seq[DbCodec[?]],\n      ecElemNames: Seq[String],\n      ecElemNamesSql: Seq[String],\n      idIndex: Int\n  )(using\n      eCodec: DbCodec[E],\n      ecCodec: DbCodec[EC],\n      idCodec: DbCodec[ID],\n      eClassTag: ClassTag[E],\n      ecClassTag: ClassTag[EC],\n      idClassTag: ClassTag[ID]\n  ): RepoDefaults[EC, E, ID] =\n    val idName = eElemNamesSql(idIndex)\n    val selectKeys = eElemNamesSql.mkString(\", \")\n    val ecInsertKeys = ecElemNamesSql.mkString(\"(\", \", \", \")\")\n\n    val updateKeys: String = eElemNamesSql\n      .lazyZip(eElemCodecs)\n      .map((sqlName, codec) => sqlName + \" = \" + codec.queryRepr)\n      .patch(idIndex, Seq.empty, 1)\n      .mkString(\", \")\n\n    val updateCodecs = eElemCodecs\n      .patch(idIndex, Seq.empty, 1)\n      .appended(idCodec)\n      .asInstanceOf[Seq[DbCodec[Any]]]\n\n    val insertGenKeys = Array.from(eElemNamesSql)\n\n    val countSql = s\"SELECT count(*) FROM $tableNameSql\"\n    val countQuery = Frag(countSql, Vector.empty, FragWriter.empty).query[Long]\n    val existsByIdSql =\n   
   s\"SELECT 1 FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val findAllSql = s\"SELECT * FROM $tableNameSql\"\n    val findAllQuery = Frag(findAllSql, Vector.empty, FragWriter.empty).query[E]\n    val findByIdSql =\n      s\"SELECT * FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val deleteByIdSql =\n      s\"DELETE FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val truncateSql = s\"TRUNCATE TABLE $tableNameSql\"\n    val truncateUpdate =\n      Frag(truncateSql, Vector.empty, FragWriter.empty).update\n    val insertSql =\n      s\"INSERT INTO $tableNameSql $ecInsertKeys VALUES (${ecCodec.queryRepr})\"\n    val updateSql =\n      s\"UPDATE $tableNameSql SET $updateKeys WHERE $idName = ${idCodec.queryRepr}\"\n\n    def idWriter(id: ID): FragWriter = (ps, pos) =>\n      idCodec.writeSingle(id, ps, pos)\n      pos + idCodec.cols.length\n\n    new RepoDefaults[EC, E, ID]:\n      def count(using con: DbCon): Long = countQuery.run().head\n\n      def existsById(id: ID)(using DbCon): Boolean =\n        Frag(existsByIdSql, IArray(id), idWriter(id))\n          .query[Int]\n          .run()\n          .nonEmpty\n\n      def findAll(using DbCon): Vector[E] = findAllQuery.run()\n\n      def findAll(spec: Spec[E])(using DbCon): Vector[E] =\n        specImpl.findAll(spec, tableNameSql)\n\n      def findById(id: ID)(using DbCon): Option[E] =\n        Frag(findByIdSql, IArray(id), idWriter(id))\n          .query[E]\n          .run()\n          .headOption\n\n      def findAllById(ids: Iterable[ID])(using DbCon): Vector[E] =\n        throw UnsupportedOperationException(\n          \"Oracle does not support SQL arrays, and does not support long IN parameter lists. 
Use findById in a loop instead.\"\n        )\n\n      def delete(entity: E)(using DbCon): Unit =\n        deleteById(\n          entity\n            .asInstanceOf[Product]\n            .productElement(idIndex)\n            .asInstanceOf[ID]\n        )\n\n      def deleteById(id: ID)(using DbCon): Unit =\n        Frag(deleteByIdSql, IArray(id), idWriter(id)).update\n          .run()\n\n      def truncate()(using DbCon): Unit = truncateUpdate.run()\n\n      def deleteAll(entities: Iterable[E])(using DbCon): BatchUpdateResult =\n        deleteAllById(\n          entities.map(e =>\n            e.asInstanceOf[Product].productElement(idIndex).asInstanceOf[ID]\n          )\n        )\n\n      def deleteAllById(ids: Iterable[ID])(using\n          con: DbCon\n      ): BatchUpdateResult =\n        handleQuery(deleteByIdSql, ids):\n          Using(con.connection.prepareStatement(deleteByIdSql)): ps =>\n            idCodec.write(ids, ps)\n            timed(batchUpdateResult(ps.executeBatch()))\n\n      def insert(entityCreator: EC)(using con: DbCon): Unit =\n        handleQuery(insertSql, entityCreator):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.writeSingle(entityCreator, ps)\n            timed(ps.executeUpdate())\n\n      def insertAll(entityCreators: Iterable[EC])(using con: DbCon): Unit =\n        handleQuery(insertSql, entityCreators):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.write(entityCreators, ps)\n            timed(batchUpdateResult(ps.executeBatch()))\n\n      def insertReturning(entityCreator: EC)(using con: DbCon): E =\n        handleQuery(insertSql, entityCreator):\n          Using.Manager: use =>\n            val ps =\n              use(con.connection.prepareStatement(insertSql, insertGenKeys))\n            ecCodec.writeSingle(entityCreator, ps)\n            timed:\n              ps.executeUpdate()\n              val rs = use(ps.getGeneratedKeys)\n              
rs.next()\n              eCodec.readSingle(rs)\n\n      def insertAllReturning(\n          entityCreators: Iterable[EC]\n      )(using con: DbCon): Vector[E] =\n        // oracle jdbc does not support batch RETURNING\n        entityCreators.map(insertReturning).toVector\n\n      def update(entity: E)(using con: DbCon): Unit =\n        handleQuery(updateSql, entity):\n          Using(con.connection.prepareStatement(updateSql)): ps =>\n            val entityValues: Vector[Any] = entity\n              .asInstanceOf[Product]\n              .productIterator\n              .toVector\n            // put ID at the end\n            val updateValues = entityValues\n              .patch(idIndex, Vector.empty, 1)\n              .appended(entityValues(idIndex))\n\n            var pos = 1\n            for (field, codec) <- updateValues.lazyZip(updateCodecs) do\n              codec.writeSingle(field, ps, pos)\n              pos += codec.cols.length\n            timed(ps.executeUpdate())\n\n      def updateAll(entities: Iterable[E])(using\n          con: DbCon\n      ): BatchUpdateResult =\n        handleQuery(updateSql, entities):\n          Using(con.connection.prepareStatement(updateSql)): ps =>\n            for entity <- entities do\n              val entityValues: Vector[Any] = entity\n                .asInstanceOf[Product]\n                .productIterator\n                .toVector\n              // put ID at the end\n              val updateValues = entityValues\n                .patch(idIndex, Vector.empty, 1)\n                .appended(entityValues(idIndex))\n\n              var pos = 1\n              for (field, codec) <- updateValues.lazyZip(updateCodecs) do\n                codec.writeSingle(field, ps, pos)\n                pos += codec.cols.length\n              ps.addBatch()\n\n            timed(batchUpdateResult(ps.executeBatch()))\n    end new\n  end buildRepoDefaults\nend OracleDbType\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/PostgresDbType.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.{Connection, JDBCType, PreparedStatement, ResultSet, Statement}\nimport java.time.OffsetDateTime\nimport scala.collection.View\nimport scala.deriving.Mirror\nimport scala.reflect.ClassTag\nimport scala.util.{Failure, Success, Using}\nimport java.util.StringJoiner\n\nobject PostgresDbType extends DbType:\n\n  def buildRepoDefaults[EC, E, ID](\n      tableNameSql: String,\n      eElemNames: Seq[String],\n      eElemNamesSql: Seq[String],\n      eElemCodecs: Seq[DbCodec[?]],\n      ecElemNames: Seq[String],\n      ecElemNamesSql: Seq[String],\n      idIndex: Int\n  )(using\n      eCodec: DbCodec[E],\n      ecCodec: DbCodec[EC],\n      idCodec: DbCodec[ID],\n      eClassTag: ClassTag[E],\n      ecClassTag: ClassTag[EC],\n      idClassTag: ClassTag[ID]\n  ): RepoDefaults[EC, E, ID] =\n    val idName = eElemNamesSql(idIndex)\n    val selectKeys = eElemNamesSql.mkString(\", \")\n    val ecInsertKeys = ecElemNamesSql.mkString(\"(\", \", \", \")\")\n\n    val updateKeys: String = eElemNamesSql\n      .lazyZip(eElemCodecs)\n      .map((sqlName, codec) => sqlName + \" = \" + codec.queryRepr)\n      .patch(idIndex, Seq.empty, 1)\n      .mkString(\", \")\n\n    val updateCodecs = eElemCodecs\n      .patch(idIndex, Seq.empty, 1)\n      .appended(idCodec)\n      .asInstanceOf[Seq[DbCodec[Any]]]\n\n    val countSql = s\"SELECT count(*) FROM $tableNameSql\"\n    val countQuery = Frag(countSql, Vector.empty, FragWriter.empty).query[Long]\n    val existsByIdSql =\n      s\"SELECT 1 FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val findAllSql = s\"SELECT $selectKeys FROM $tableNameSql\"\n    val findAllQuery = Frag(findAllSql, Vector.empty, FragWriter.empty).query[E]\n    val findByIdSql =\n      s\"SELECT $selectKeys FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val findAllByIdSql =\n      s\"SELECT $selectKeys FROM $tableNameSql WHERE $idName = ANY(?)\"\n    val deleteByIdSql =\n      
s\"DELETE FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val truncateSql = s\"TRUNCATE TABLE $tableNameSql\"\n    val truncateUpdate =\n      Frag(truncateSql, Vector.empty, FragWriter.empty).update\n    val insertSql =\n      s\"INSERT INTO $tableNameSql $ecInsertKeys VALUES (${ecCodec.queryRepr})\"\n    val updateSql =\n      s\"UPDATE $tableNameSql SET $updateKeys WHERE $idName = ${idCodec.queryRepr}\"\n\n    val compositeId = idCodec.cols.distinct.size != 1\n    val idFirstTypeName = JDBCType.valueOf(idCodec.cols.head).getName\n\n    def idWriter(id: ID): FragWriter = (ps, pos) =>\n      idCodec.writeSingle(id, ps, pos)\n      pos + idCodec.cols.length\n\n    new RepoDefaults[EC, E, ID]:\n      def count(using con: DbCon): Long = countQuery.run().head\n\n      def existsById(id: ID)(using DbCon): Boolean =\n        Frag(existsByIdSql, IArray(id), idWriter(id))\n          .query[Int]\n          .run()\n          .nonEmpty\n\n      def findAll(using DbCon): Vector[E] = findAllQuery.run()\n\n      def findAll(spec: Spec[E])(using DbCon): Vector[E] =\n        SpecImpl.Default.findAll(spec, tableNameSql)\n\n      def findById(id: ID)(using DbCon): Option[E] =\n        Frag(findByIdSql, IArray(id), idWriter(id))\n          .query[E]\n          .run()\n          .headOption\n\n      def findAllById(ids: Iterable[ID])(using DbCon): Vector[E] =\n        if compositeId then\n          throw UnsupportedOperationException(\n            \"Composite ids unsupported for findAllById.\"\n          )\n        val idsArray = Array.from[Any](ids)\n        Frag(\n          findAllByIdSql,\n          IArray(idsArray),\n          (ps, pos) =>\n            val sqlArray =\n              ps.getConnection.createArrayOf(idFirstTypeName, idsArray)\n            ps.setArray(pos, sqlArray)\n            pos + 1\n        ).query[E].run()\n\n      def delete(entity: E)(using DbCon): Unit =\n        deleteById(\n          entity\n            .asInstanceOf[Product]\n            
.productElement(idIndex)\n            .asInstanceOf[ID]\n        )\n\n      def deleteById(id: ID)(using DbCon): Unit =\n        Frag(deleteByIdSql, IArray(id), idWriter(id)).update\n          .run()\n\n      def truncate()(using DbCon): Unit =\n        truncateUpdate.run()\n\n      def deleteAll(entities: Iterable[E])(using DbCon): BatchUpdateResult =\n        deleteAllById(\n          entities.map(e =>\n            e.asInstanceOf[Product].productElement(idIndex).asInstanceOf[ID]\n          )\n        )\n\n      def deleteAllById(ids: Iterable[ID])(using\n          con: DbCon\n      ): BatchUpdateResult =\n        handleQuery(deleteByIdSql, ids):\n          Using(con.connection.prepareStatement(deleteByIdSql)): ps =>\n            idCodec.write(ids, ps)\n            timed(batchUpdateResult(ps.executeBatch()))\n\n      def insert(entityCreator: EC)(using con: DbCon): Unit =\n        handleQuery(insertSql, entityCreator):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.writeSingle(entityCreator, ps)\n            timed(ps.executeUpdate())\n\n      def insertAll(entityCreators: Iterable[EC])(using con: DbCon): Unit =\n        handleQuery(insertSql, entityCreators):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.write(entityCreators, ps)\n            timed(batchUpdateResult(ps.executeBatch()))\n\n      def insertReturning(entityCreator: EC)(using con: DbCon): E =\n        handleQuery(insertSql, entityCreator):\n          Using.Manager: use =>\n            val ps = use(\n              con.connection\n                .prepareStatement(insertSql, Statement.RETURN_GENERATED_KEYS)\n            )\n            ecCodec.writeSingle(entityCreator, ps)\n            timed:\n              ps.executeUpdate()\n              val rs = use(ps.getGeneratedKeys)\n              rs.next()\n              eCodec.readSingle(rs)\n\n      def insertAllReturning(\n          entityCreators: Iterable[EC]\n      
)(using con: DbCon): Vector[E] =\n        handleQuery(insertSql, entityCreators):\n          Using.Manager: use =>\n            val ps = use(\n              con.connection\n                .prepareStatement(insertSql, Statement.RETURN_GENERATED_KEYS)\n            )\n            ecCodec.write(entityCreators, ps)\n            timed:\n              batchUpdateResult(ps.executeBatch())\n              val rs = use(ps.getGeneratedKeys)\n              eCodec.read(rs)\n\n      def update(entity: E)(using con: DbCon): Unit =\n        handleQuery(updateSql, entity):\n          Using(con.connection.prepareStatement(updateSql)): ps =>\n            val entityValues: Vector[Any] = entity\n              .asInstanceOf[Product]\n              .productIterator\n              .toVector\n            // put ID at the end\n            val updateValues = entityValues\n              .patch(idIndex, Vector.empty, 1)\n              .appended(entityValues(idIndex))\n\n            var pos = 1\n            for (field, codec) <- updateValues.lazyZip(updateCodecs) do\n              codec.writeSingle(field, ps, pos)\n              pos += codec.cols.length\n            timed(ps.executeUpdate())\n\n      def updateAll(entities: Iterable[E])(using\n          con: DbCon\n      ): BatchUpdateResult =\n        handleQuery(updateSql, entities):\n          Using(con.connection.prepareStatement(updateSql)): ps =>\n            for entity <- entities do\n              val entityValues: Vector[Any] = entity\n                .asInstanceOf[Product]\n                .productIterator\n                .toVector\n              // put ID at the end\n              val updateValues = entityValues\n                .patch(idIndex, Vector.empty, 1)\n                .appended(entityValues(idIndex))\n\n              var pos = 1\n              for (field, codec) <- updateValues.lazyZip(updateCodecs) do\n                codec.writeSingle(field, ps, pos)\n                pos += codec.cols.length\n              
ps.addBatch()\n\n            timed(batchUpdateResult(ps.executeBatch()))\n    end new\n  end buildRepoDefaults\nend PostgresDbType\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/Query.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.util.concurrent.TimeUnit\nimport scala.concurrent.duration.FiniteDuration\nimport scala.util.Using.Manager\nimport scala.util.control.NonFatal\nimport scala.util.{Failure, Success, Try, Using}\n\nclass Query[E] private[magnum] (val frag: Frag, reader: DbCodec[E]):\n\n  def run()(using con: DbCon): Vector[E] =\n    handleQuery(frag.sqlString, frag.params):\n      Using.Manager: use =>\n        val ps = use(con.connection.prepareStatement(frag.sqlString))\n        frag.writer.write(ps, 1)\n        timed:\n          val rs = use(ps.executeQuery())\n          reader.read(rs)\n\n  /** Streaming [[Iterator]]. Set [[fetchSize]] to give the JDBC driver a hint\n    * as to how many rows to fetch per request\n    */\n  def iterator(\n      fetchSize: Int = 0\n  )(using con: DbCon, use: Manager): Iterator[E] =\n    handleQuery(frag.sqlString, frag.params):\n      Try:\n        val ps = use(con.connection.prepareStatement(frag.sqlString))\n        ps.setFetchSize(fetchSize)\n        frag.writer.write(ps, 1)\n        timed:\n          val rs = use(ps.executeQuery())\n          ResultSetIterator(rs, frag, reader, con.sqlLogger)\n\nend Query\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/Repo.scala",
    "content": "package com.augustnagro.magnum\n\nimport javax.sql.DataSource\n\n/** A read & write data repository\n  *\n  * @tparam EC\n  *   'Entity Creator', which should have all fields of E minus those\n  *   auto-generated by the database. Can be the same type as E.\n  * @tparam E\n  *   database entity class\n  * @tparam ID\n  *   id type of E\n  */\nopen class Repo[EC, E, ID](using defaults: RepoDefaults[EC, E, ID])\n    extends ImmutableRepo[E, ID]:\n\n  /** Deletes an entity using its id */\n  def delete(entity: E)(using DbCon): Unit = defaults.delete(entity)\n\n  /** Deletes an entity using its id */\n  def deleteById(id: ID)(using DbCon): Unit = defaults.deleteById(id)\n\n  /** Deletes ALL entities */\n  def truncate()(using DbCon): Unit = defaults.truncate()\n\n  /** Delete all provided entities */\n  def deleteAll(entities: Iterable[E])(using DbCon): BatchUpdateResult =\n    defaults.deleteAll(entities)\n\n  /** Deletes all entities with an Iterable of ids */\n  def deleteAllById(ids: Iterable[ID])(using DbCon): BatchUpdateResult =\n    defaults.deleteAllById(ids)\n\n  /** Insert and return entity E */\n  def insert(entityCreator: EC)(using DbCon): Unit =\n    defaults.insert(entityCreator)\n\n  /** Insert and return all new entities */\n  def insertAll(entityCreators: Iterable[EC])(using DbCon): Unit =\n    defaults.insertAll(entityCreators)\n\n  def insertReturning(entityCreator: EC)(using DbCon): E =\n    defaults.insertReturning(entityCreator)\n\n  def insertAllReturning(entityCreators: Iterable[EC])(using DbCon): Vector[E] =\n    defaults.insertAllReturning(entityCreators)\n\n  /** Update the entity */\n  def update(entity: E)(using DbCon): Unit = defaults.update(entity)\n\n  /** Update all entities */\n  def updateAll(entities: Iterable[E])(using DbCon): BatchUpdateResult =\n    defaults.updateAll(entities)\n\nend Repo\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/RepoDefaults.scala",
    "content": "package com.augustnagro.magnum\n\nimport scala.compiletime.*\nimport scala.deriving.*\nimport scala.quoted.*\nimport scala.reflect.ClassTag\n\ntrait RepoDefaults[EC, E, ID]:\n  def count(using DbCon): Long\n  def existsById(id: ID)(using DbCon): Boolean\n  def findAll(using DbCon): Vector[E]\n  def findAll(spec: Spec[E])(using DbCon): Vector[E]\n  def findById(id: ID)(using DbCon): Option[E]\n  def findAllById(ids: Iterable[ID])(using DbCon): Vector[E]\n  def delete(entity: E)(using DbCon): Unit\n  def deleteById(id: ID)(using DbCon): Unit\n  def truncate()(using DbCon): Unit\n  def deleteAll(entities: Iterable[E])(using DbCon): BatchUpdateResult\n  def deleteAllById(ids: Iterable[ID])(using DbCon): BatchUpdateResult\n  def insert(entityCreator: EC)(using DbCon): Unit\n  def insertAll(entityCreators: Iterable[EC])(using DbCon): Unit\n  def insertReturning(entityCreator: EC)(using DbCon): E\n  def insertAllReturning(entityCreators: Iterable[EC])(using DbCon): Vector[E]\n  def update(entity: E)(using DbCon): Unit\n  def updateAll(entities: Iterable[E])(using DbCon): BatchUpdateResult\n\nobject RepoDefaults:\n\n  inline given genImmutableRepo[E: DbCodec: Mirror.Of, ID]\n      : RepoDefaults[E, E, ID] =\n    genRepo[E, E, ID]\n\n  inline given genRepo[\n      EC: DbCodec: Mirror.Of,\n      E: DbCodec: Mirror.Of,\n      ID\n  ]: RepoDefaults[EC, E, ID] = ${ genImpl[EC, E, ID] }\n\n  private def genImpl[EC: Type, E: Type, ID: Type](using\n      Quotes\n  ): Expr[RepoDefaults[EC, E, ID]] =\n    import quotes.reflect.*\n    val exprs = tableExprs[EC, E, ID]\n    val eElemCodecs = getEElemCodecs[E]\n    val eCodec = Expr.summon[DbCodec[E]].get\n    val ecCodec = Expr.summon[DbCodec[EC]].get\n    val idCodec =\n      if TypeRepr.of[ID] =:= TypeRepr.of[Null] then\n        '{ DbCodec.AnyCodec.asInstanceOf[DbCodec[ID]] }\n      else Expr.summon[DbCodec[ID]].get\n    val eClassTag = Expr.summon[ClassTag[E]].get\n    val ecClassTag = 
Expr.summon[ClassTag[EC]].get\n    val idClassTag =\n      if TypeRepr.of[ID] =:= TypeRepr.of[Null] then\n        '{ ClassTag.Any.asInstanceOf[ClassTag[ID]] }\n      else Expr.summon[ClassTag[ID]].get\n    '{\n      ${ exprs.tableAnnot }.dbType.buildRepoDefaults[EC, E, ID](\n        ${ exprs.tableNameSql },\n        ${ Expr(exprs.eElemNames) },\n        ${ Expr.ofSeq(exprs.eElemNamesSql) },\n        $eElemCodecs,\n        ${ Expr(exprs.ecElemNames) },\n        ${ Expr.ofSeq(exprs.ecElemNamesSql) },\n        ${ exprs.idIndex }\n      )(using\n        $eCodec,\n        $ecCodec,\n        $idCodec,\n        $eClassTag,\n        $ecClassTag,\n        $idClassTag\n      )\n    }\n  end genImpl\n\n  private def getEElemCodecs[E: Type](using Quotes): Expr[Seq[DbCodec[?]]] =\n    import quotes.reflect.*\n    Expr.summon[Mirror.ProductOf[E]] match\n      case Some('{\n            $m: Mirror.ProductOf[E] {\n              type MirroredElemTypes = mets\n            }\n          }) =>\n        getProductCodecs[mets]()\n      case _ =>\n        val sumCodec = Expr.summon[DbCodec[E]].get\n        '{ Seq($sumCodec) }\n\n  private def getProductCodecs[Mets: Type](\n      res: Vector[Expr[DbCodec[?]]] = Vector.empty\n  )(using Quotes): Expr[Seq[DbCodec[?]]] =\n    Type.of[Mets] match\n      case '[met *: metTail] =>\n        Expr.summon[DbCodec[met]] match\n          case Some(codec) => getProductCodecs[metTail](res :+ codec)\n          case None => getProductCodecs[metTail](res :+ '{ DbCodec.AnyCodec })\n      case '[EmptyTuple] => Expr.ofSeq(res)\n\nend RepoDefaults\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/ResultSetIterator.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.ResultSet\nimport scala.util.control.NonFatal\n\nprivate class ResultSetIterator[E](\n    rs: ResultSet,\n    frag: Frag,\n    reader: DbCodec[E],\n    sqlLogger: SqlLogger\n) extends Iterator[E] {\n\n  private var rsHasNext: Boolean =\n    try rs.next()\n    catch\n      case NonFatal(t) =>\n        throw SqlException(\n          sqlLogger.exceptionMsg(\n            SqlExceptionEvent(frag.sqlString, frag.params, t)\n          ),\n          t\n        )\n\n  override def hasNext: Boolean = rsHasNext\n\n  override def next(): E =\n    if !rsHasNext then throw IllegalStateException(\"ResultSet is empty\")\n    try\n      val e = reader.readSingle(rs)\n      rsHasNext = rs.next()\n      e\n    catch\n      case NonFatal(t) =>\n        throw SqlException(\n          sqlLogger.exceptionMsg(\n            SqlExceptionEvent(frag.sqlString, frag.params, t)\n          ),\n          t\n        )\n\n}\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/Returning.scala",
    "content": "package com.augustnagro.magnum\n\nimport scala.util.{Failure, Success, Try, Using}\nimport Using.Manager\nimport java.sql.Statement\nimport java.sql.ResultSet\n\nclass Returning[E] private[magnum] (\n    val frag: Frag,\n    reader: DbCodec[E],\n    keyColumns: Iterable[String]\n):\n  def run()(using con: DbCon): Vector[E] =\n    withResultSet(reader.read)\n\n  /** Streaming [[Iterator]]. Set [[fetchSize]] to give the JDBC driver a hint\n    * as to how many rows to fetch per request\n    */\n  def iterator(\n      fetchSize: Int = 0\n  )(using con: DbCon, use: Manager): Iterator[E] =\n    withResultSet(ResultSetIterator(_, frag, reader, con.sqlLogger))\n\n  private def withResultSet[A](f: ResultSet => A)(using con: DbCon): A =\n    handleQuery(frag.sqlString, frag.params):\n      Manager: use =>\n        if keyColumns.isEmpty then\n          val ps = use(con.connection.prepareStatement(frag.sqlString))\n          frag.writer.write(ps, 1)\n          timed:\n            val hasResults = ps.execute()\n            if hasResults then\n              val rs = use(ps.getResultSet)\n              f(rs)\n            else\n              throw UnsupportedOperationException(\n                \"No results for RETURNING clause\"\n              )\n        else\n          val ps = use(\n            con.connection.prepareStatement(frag.sqlString, keyColumns.toArray)\n          )\n          frag.writer.write(ps, 1)\n          timed:\n            ps.execute()\n            val rs = use(ps.getGeneratedKeys)\n            f(rs)\n\nend Returning\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/Seek.scala",
    "content": "package com.augustnagro.magnum\n\nclass Seek private[magnum] (\n    val column: String,\n    val seekDirection: SeekDir,\n    val value: Any,\n    val columnSort: SortOrder,\n    val nullOrder: NullOrder,\n    val codec: DbCodec[?]\n)\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/SeekDir.scala",
    "content": "package com.augustnagro.magnum\n\ntrait SeekDir\n\nobject SeekDir:\n  case object Gt extends SeekDir\n  case object Lt extends SeekDir\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/Sort.scala",
    "content": "package com.augustnagro.magnum\n\nclass Sort private[magnum] (\n    val column: String,\n    val direction: SortOrder,\n    val nullOrder: NullOrder\n)\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/SortOrder.scala",
    "content": "package com.augustnagro.magnum\n\ntrait SortOrder\n\nobject SortOrder:\n  case object Default extends SortOrder\n  case object Asc extends SortOrder\n  case object Desc extends SortOrder\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/Spec.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.util.StringJoiner\n\nclass Spec[E] private (\n    val prefix: Option[Frag],\n    val predicates: Vector[Frag],\n    val limit: Option[Int],\n    val offset: Option[Long],\n    val sorts: Vector[Sort],\n    val seeks: Vector[Seek]\n):\n\n  def prefix(sql: Frag): Spec[E] =\n    new Spec(Some(sql), predicates, limit, offset, sorts, seeks)\n\n  def where(sql: Frag): Spec[E] =\n    new Spec(prefix, predicates :+ sql, limit, offset, sorts, seeks)\n\n  def orderBy(\n      column: String,\n      direction: SortOrder = SortOrder.Default,\n      nullOrder: NullOrder = NullOrder.Default\n  ): Spec[E] =\n    val sort = Sort(column, direction, nullOrder)\n    new Spec(prefix, predicates, limit, offset, sorts :+ sort, seeks)\n\n  def limit(limit: Int): Spec[E] =\n    new Spec(prefix, predicates, Some(limit), offset, sorts, seeks)\n\n  def offset(offset: Long): Spec[E] =\n    new Spec(prefix, predicates, limit, Some(offset), sorts, seeks)\n\n  def seek[V](\n      column: String,\n      seekDirection: SeekDir,\n      value: V,\n      columnSort: SortOrder,\n      nullOrder: NullOrder = NullOrder.Default\n  )(using codec: DbCodec[V]): Spec[E] =\n    val seek = Seek(column, seekDirection, value, columnSort, nullOrder, codec)\n    new Spec(prefix, predicates, limit, offset, sorts, seeks :+ seek)\n\nend Spec\n\nobject Spec:\n  def apply[E]: Spec[E] =\n    new Spec(None, Vector.empty, None, None, Vector.empty, Vector.empty)\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/SpecImpl.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.util.StringJoiner\n\nprivate trait SpecImpl:\n  def sortSql(sort: Sort): String =\n    val dir = sort.direction match\n      case SortOrder.Default => \"\"\n      case SortOrder.Asc     => \" ASC\"\n      case SortOrder.Desc    => \" DESC\"\n      case _                 => throw UnsupportedOperationException()\n    val nullOrder = sort.nullOrder match\n      case NullOrder.Default => \"\"\n      case NullOrder.First   => \" NULLS FIRST\"\n      case NullOrder.Last    => \" NULLS LAST\"\n      case _                 => throw UnsupportedOperationException()\n    sort.column + dir + nullOrder\n\n  def offsetLimitSql(offset: Option[Long], limit: Option[Int]): Option[String] =\n    (offset, limit) match\n      case (Some(o), Some(l)) => Some(s\"OFFSET $o LIMIT $l\")\n      case (Some(o), None)    => Some(s\"OFFSET $o\")\n      case (None, Some(l))    => Some(s\"LIMIT $l\")\n      case (None, None)       => None\n\n  def seekSql(seek: Seek): String =\n    val seekDir = seek.seekDirection match\n      case SeekDir.Gt => \">\"\n      case SeekDir.Lt => \"<\"\n      case _          => throw UnsupportedOperationException()\n    s\"${seek.column} $seekDir ?\"\n\n  def findAll[E: DbCodec](spec: Spec[E], tableNameSql: String)(using\n      DbCon\n  ): Vector[E] =\n    val whereClause = StringJoiner(\" AND \", \"WHERE \", \"\").setEmptyValue(\"\")\n\n    val allParams = Vector.newBuilder[Any]\n\n    val tableNameLiteral = SqlLiteral(tableNameSql)\n    val prefixFrag = spec.prefix.getOrElse(sql\"SELECT * FROM $tableNameLiteral\")\n    allParams ++= prefixFrag.params\n\n    val seekPredicates = spec.seeks.map(seek =>\n      val codec = seek.codec.asInstanceOf[DbCodec[Any]]\n      Frag(\n        seekSql(seek),\n        Vector(seek.value),\n        (ps, pos) =>\n          codec.writeSingle(seek.value, ps, pos)\n          pos + codec.cols.length\n      )\n    )\n\n    val whereFrags =\n      (spec.predicates ++ 
seekPredicates).filter(_.sqlString.nonEmpty)\n    for frag <- whereFrags do\n      whereClause.add(\"(\" + frag.sqlString + \")\")\n      allParams ++= frag.params\n\n    val seekSorts =\n      spec.seeks.map(seek => Sort(seek.column, seek.columnSort, seek.nullOrder))\n    val orderByClause =\n      StringJoiner(\", \", \"ORDER BY \", \"\").setEmptyValue(\"\")\n    for sort <- spec.sorts ++ seekSorts do orderByClause.add(sortSql(sort))\n\n    val finalSj = StringJoiner(\" \")\n    if prefixFrag.sqlString.nonEmpty then finalSj.add(prefixFrag.sqlString)\n    val whereClauseStr = whereClause.toString\n    if whereClauseStr.nonEmpty then finalSj.add(whereClauseStr)\n    val orderByClauseStr = orderByClause.toString\n    if orderByClauseStr.nonEmpty then finalSj.add(orderByClauseStr)\n\n    for offsetLimit <- offsetLimitSql(spec.offset, spec.limit) do\n      finalSj.add(offsetLimit)\n\n    val allFrags = prefixFrag +: whereFrags\n    val fragWriter: FragWriter = (ps, startingPos) =>\n      allFrags.foldLeft(startingPos)((pos, frag) => frag.writer.write(ps, pos))\n\n    Frag(finalSj.toString, allParams.result(), fragWriter)\n      .query[E]\n      .run()\n  end findAll\nend SpecImpl\n\nprivate object SpecImpl:\n  object Default extends SpecImpl\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/SqlException.scala",
    "content": "package com.augustnagro.magnum\n\nclass SqlException private[magnum] (message: String, cause: Throwable = null)\n    extends RuntimeException(message, cause)\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/SqlExceptionEvent.scala",
    "content": "package com.augustnagro.magnum\n\n/** Metadata for a exceptional SQL statement. */\nclass SqlExceptionEvent private[magnum] (\n    /** The SQL string */\n    val sql: String,\n    anyParams: Any,\n    /** Cause of the exception */\n    val cause: Throwable\n):\n  /** The parameters used when executing. The type is `Iterator[Iterator[Any]]`\n    * to support logging batched updates. For example,\n    * {{{\n    *   repo.insert(User(a, b, c)) // provides Iterator(Iterator(a, b, c))\n    *   repo.insertAll(List(User(a, b, c), User(d, e, f))) // provides Iterator(Iterator(a, b, c), Iterator(d, e, f))\n    * }}}\n    */\n  def params: Iterator[Iterator[Any]] = parseParams(anyParams)\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/SqlLiteral.scala",
    "content": "package com.augustnagro.magnum\n\n/** A SQL string that is interpolated directly into a sql\"\" query (and not as a\n  * PreparedStatement parameter)\n  *\n  * For example,\n  *\n  * {{{\n  *   val myQaSchema = SqlLiteral(\"db_qa\")\n  *   sql\"SELECT * FROM $myQaSchema.table_name\"\n  * }}}\n  *\n  * Generates the SQL:\n  * {{{\n  *   \"SELECT * FROM db_qa.table_name\"\n  * }}}\n  */\ntrait SqlLiteral:\n  def queryRepr: String\n\nobject SqlLiteral:\n  def apply(s: String): SqlLiteral =\n    new SqlLiteral:\n      def queryRepr: String = s\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/SqlLogger.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.lang.System.Logger.Level\nimport scala.concurrent.duration.FiniteDuration\n\n/** Trait to provide logging of Magnum SQL statements.\n  */\ntrait SqlLogger:\n  /** Log a successful SQL statement execution. If a query fails a\n    * [[SqlException]] will be thrown, and this logger will not be triggered.\n    */\n  def log(successEvent: SqlSuccessEvent): Unit\n\n  /** Constructs the exception message for [[SqlException]]s */\n  def exceptionMsg(exceptionEvent: SqlExceptionEvent): String\n\nobject SqlLogger:\n  object NoOp extends SqlLogger:\n    override def log(successEvent: SqlSuccessEvent): Unit = ()\n    override def exceptionMsg(exceptionEvent: SqlExceptionEvent): String =\n      exceptionEvent.cause.getMessage\n\n  object Default extends SqlLogger:\n    override def log(successEvent: SqlSuccessEvent): Unit =\n      if Log.isLoggable(Level.TRACE) then\n        Log.log(\n          Level.TRACE,\n          s\"\"\"Executed Query in ${successEvent.execTime}:\n             |${successEvent.sql}\n             |\n             |With values:\n             |${paramsString(successEvent.params)}\n             |\"\"\".stripMargin\n        )\n      else if Log.isLoggable(Level.DEBUG) then\n        Log.log(\n          Level.DEBUG,\n          s\"\"\"Executed Query in ${successEvent.execTime}:\n             |${successEvent.sql}\n             |\"\"\".stripMargin\n        )\n\n    override def exceptionMsg(exceptionEvent: SqlExceptionEvent): String =\n      if Log.isLoggable(System.Logger.Level.TRACE) then\n        s\"\"\"Error executing query:\n           |${exceptionEvent.sql}\n           |With message:\n           |${exceptionEvent.cause.getMessage}\n           |And values:\n           |${paramsString(exceptionEvent.params)}\n           |\"\"\".stripMargin\n      else s\"\"\"Error executing query:\n              |${exceptionEvent.sql}\n              |With message:\n              |${exceptionEvent.cause}\n              
|\"\"\".stripMargin\n  end Default\n\n  def logSlowQueries(slowerThan: FiniteDuration): SqlLogger = new:\n    override def log(logEvent: SqlSuccessEvent): Unit =\n      if logEvent.execTime > slowerThan then\n        if Log.isLoggable(Level.TRACE) then\n          Log.log(\n            Level.WARNING,\n            s\"\"\"Executed SLOW Query in ${logEvent.execTime}:\n               |${logEvent.sql}\n               |\n               |With values:\n               |${paramsString(logEvent.params)}\n               |\"\"\".stripMargin\n          )\n        else if Log.isLoggable(Level.WARNING) then\n          Log.log(\n            Level.WARNING,\n            s\"\"\"Executed SLOW Query in ${logEvent.execTime}:\n               |${logEvent.sql}\n               |\"\"\".stripMargin\n          )\n        end if\n      else Default.log(logEvent)\n\n    override def exceptionMsg(exceptionEvent: SqlExceptionEvent): String =\n      Default.exceptionMsg(exceptionEvent)\nend SqlLogger\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/SqlName.scala",
    "content": "package com.augustnagro.magnum\n\nimport scala.annotation.StaticAnnotation\n\nclass SqlName(val name: String) extends StaticAnnotation\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/SqlNameMapper.scala",
    "content": "package com.augustnagro.magnum\n\n/** Mapping from scala terms to sql terms */\ntrait SqlNameMapper:\n  def toColumnName(scalaName: String): String\n  def toTableName(scalaName: String): String\n\nobject SqlNameMapper:\n\n  /** Converts camelCase scala names to snake_case */\n  object CamelToSnakeCase extends SqlNameMapper:\n\n    def toColumnName(scalaName: String): String = toCase(scalaName)\n\n    def toTableName(scalaName: String): String = toCase(scalaName)\n\n    private def toCase(scalaName: String): String =\n      val res = StringBuilder().append(scalaName.head.toLower)\n      for i <- 1 until scalaName.length do\n        val c = scalaName.charAt(i)\n        if c.isUpper then res.append('_').append(c.toLower)\n        else res.append(c)\n      res.result()\n\n  object CamelToUpperSnakeCase extends SqlNameMapper:\n    def toColumnName(scalaName: String): String = toCase(scalaName)\n\n    def toTableName(scalaName: String): String = toCase(scalaName)\n\n    private def toCase(scalaName: String): String =\n      val res = StringBuilder().append(scalaName.head.toUpper)\n      for i <- 1 until scalaName.length do\n        val c = scalaName.charAt(i)\n        if c.isUpper then res.append('_').append(c)\n        else res.append(c.toUpper)\n      res.result()\n\n  /** SqlNameMapper that keeps the same case as the provided scala names */\n  object SameCase extends SqlNameMapper:\n    def toColumnName(scalaName: String): String = scalaName\n    def toTableName(scalaName: String): String = scalaName\nend SqlNameMapper\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/SqlSuccessEvent.scala",
    "content": "package com.augustnagro.magnum\n\nimport scala.concurrent.duration.FiniteDuration\n\n/** Metadata for a successfully executed SQL statement. */\nclass SqlSuccessEvent private[magnum] (\n    /** The SQL String */\n    val sql: String,\n    anyParams: Any,\n    /** Time taken to execute the query, fetch data, and build the results. Does\n      * not include time to construct the preparedStatement. For streaming\n      * methods like `Query.iterator`, sqlExecTime is only calculated for the\n      * first fetch.\n      */\n    val execTime: FiniteDuration\n):\n  /** The parameters used when executing. The type is `Iterator[Iterator[Any]]`\n    * to support logging batched updates. For example,\n    * {{{\n    *   repo.insert(User(a, b, c)) // provides Iterator(Iterator(a, b, c))\n    *   repo.insertAll(List(User(a, b, c), User(d, e, f))) // provides Iterator(Iterator(a, b, c), Iterator(d, e, f))\n    * }}}\n    */\n  def params: Iterator[Iterator[Any]] = parseParams(anyParams)\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/SqliteDbType.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.{Connection, PreparedStatement, ResultSet, Statement}\nimport java.time.OffsetDateTime\nimport scala.collection.View\nimport scala.deriving.Mirror\nimport scala.reflect.ClassTag\nimport scala.util.{Failure, Success, Using}\n\nobject SqliteDbType extends DbType:\n\n  private val specImpl = new SpecImpl:\n    override def offsetLimitSql(\n        offset: Option[Long],\n        limit: Option[Int]\n    ): Option[String] =\n      (offset, limit) match\n        case (Some(o), Some(l)) => Some(s\"LIMIT $o, $l\")\n        case (Some(o), None)    => Some(s\"LIMIT $o, ${Long.MaxValue}\")\n        case (None, Some(l))    => Some(s\"LIMIT $l\")\n        case (None, None)       => None\n\n  def buildRepoDefaults[EC, E, ID](\n      tableNameSql: String,\n      eElemNames: Seq[String],\n      eElemNamesSql: Seq[String],\n      eElemCodecs: Seq[DbCodec[?]],\n      ecElemNames: Seq[String],\n      ecElemNamesSql: Seq[String],\n      idIndex: Int\n  )(using\n      eCodec: DbCodec[E],\n      ecCodec: DbCodec[EC],\n      idCodec: DbCodec[ID],\n      eClassTag: ClassTag[E],\n      ecClassTag: ClassTag[EC],\n      idClassTag: ClassTag[ID]\n  ): RepoDefaults[EC, E, ID] =\n    val idName = eElemNamesSql(idIndex)\n    val selectKeys = eElemNamesSql.mkString(\", \")\n    val ecInsertKeys = ecElemNamesSql.mkString(\"(\", \", \", \")\")\n\n    val updateKeys: String = eElemNamesSql\n      .lazyZip(eElemCodecs)\n      .map((sqlName, codec) => sqlName + \" = \" + codec.queryRepr)\n      .patch(idIndex, Seq.empty, 1)\n      .mkString(\", \")\n\n    val updateCodecs = eElemCodecs\n      .patch(idIndex, Seq.empty, 1)\n      .appended(idCodec)\n      .asInstanceOf[Seq[DbCodec[Any]]]\n\n    val insertGenKeys = eElemNamesSql.toArray\n\n    val countSql = s\"SELECT count(*) FROM $tableNameSql\"\n    val countQuery = Frag(countSql, Vector.empty, FragWriter.empty).query[Long]\n    val existsByIdSql =\n      s\"SELECT 1 FROM $tableNameSql 
WHERE $idName = ${idCodec.queryRepr}\"\n    val findAllSql = s\"SELECT * FROM $tableNameSql\"\n    val findAllQuery = Frag(findAllSql, Vector.empty, FragWriter.empty).query[E]\n    val findByIdSql =\n      s\"SELECT * FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val deleteByIdSql =\n      s\"DELETE FROM $tableNameSql WHERE $idName = ${idCodec.queryRepr}\"\n    val truncateSql = s\"DELETE FROM $tableNameSql\"\n    val truncateUpdate =\n      Frag(truncateSql, Vector.empty, FragWriter.empty).update\n    val insertSql =\n      s\"INSERT INTO $tableNameSql $ecInsertKeys VALUES (${ecCodec.queryRepr})\"\n    val updateSql =\n      s\"UPDATE $tableNameSql SET $updateKeys WHERE $idName = ${idCodec.queryRepr}\"\n\n    def idWriter(id: ID): FragWriter = (ps, pos) =>\n      idCodec.writeSingle(id, ps, pos)\n      pos + idCodec.cols.length\n\n    new RepoDefaults[EC, E, ID]:\n      def count(using con: DbCon): Long = countQuery.run().head\n\n      def existsById(id: ID)(using DbCon): Boolean =\n        Frag(existsByIdSql, IArray(id), idWriter(id))\n          .query[Int]\n          .run()\n          .nonEmpty\n\n      def findAll(using DbCon): Vector[E] = findAllQuery.run()\n\n      def findAll(spec: Spec[E])(using DbCon): Vector[E] =\n        specImpl.findAll(spec, tableNameSql)\n\n      def findById(id: ID)(using DbCon): Option[E] =\n        Frag(findByIdSql, IArray(id), idWriter(id))\n          .query[E]\n          .run()\n          .headOption\n\n      def findAllById(ids: Iterable[ID])(using DbCon): Vector[E] =\n        throw UnsupportedOperationException(\n          \"Sqlite does not support 'ANY' keyword, and does not support long IN parameter lists. 
Use findById in a loop instead.\"\n        )\n\n      def delete(entity: E)(using DbCon): Unit =\n        deleteById(\n          entity\n            .asInstanceOf[Product]\n            .productElement(idIndex)\n            .asInstanceOf[ID]\n        )\n\n      def deleteById(id: ID)(using DbCon): Unit =\n        Frag(deleteByIdSql, IArray(id), idWriter(id)).update\n          .run()\n\n      def truncate()(using DbCon): Unit =\n        truncateUpdate.run()\n\n      def deleteAll(entities: Iterable[E])(using DbCon): BatchUpdateResult =\n        deleteAllById(\n          entities.map(e =>\n            e.asInstanceOf[Product].productElement(idIndex).asInstanceOf[ID]\n          )\n        )\n\n      def deleteAllById(ids: Iterable[ID])(using\n          con: DbCon\n      ): BatchUpdateResult =\n        handleQuery(deleteByIdSql, ids):\n          Using(con.connection.prepareStatement(deleteByIdSql)): ps =>\n            idCodec.write(ids, ps)\n            timed(batchUpdateResult(ps.executeBatch()))\n\n      def insert(entityCreator: EC)(using con: DbCon): Unit =\n        handleQuery(insertSql, entityCreator):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.writeSingle(entityCreator, ps)\n            timed(ps.executeUpdate())\n\n      def insertAll(entityCreators: Iterable[EC])(using con: DbCon): Unit =\n        handleQuery(insertSql, entityCreators):\n          Using(con.connection.prepareStatement(insertSql)): ps =>\n            ecCodec.write(entityCreators, ps)\n            timed(batchUpdateResult(ps.executeBatch()))\n\n      // https://github.com/AugustNagro/magnum/issues/87#issuecomment-2591823574\n      def insertReturning(entityCreator: EC)(using con: DbCon): E =\n        throw UnsupportedOperationException()\n\n      // https://github.com/AugustNagro/magnum/issues/87#issuecomment-2591823574\n      def insertAllReturning(\n          entityCreators: Iterable[EC]\n      )(using con: DbCon): Vector[E] =\n        throw 
UnsupportedOperationException()\n\n      def update(entity: E)(using con: DbCon): Unit =\n        handleQuery(updateSql, entity):\n          Using(con.connection.prepareStatement(updateSql)): ps =>\n            val entityValues: Vector[Any] = entity\n              .asInstanceOf[Product]\n              .productIterator\n              .toVector\n            // put ID at the end\n            val updateValues = entityValues\n              .patch(idIndex, Vector.empty, 1)\n              .appended(entityValues(idIndex))\n\n            var pos = 1\n            for (field, codec) <- updateValues.lazyZip(updateCodecs) do\n              codec.writeSingle(field, ps, pos)\n              pos += codec.cols.length\n            timed(ps.executeUpdate())\n\n      def updateAll(entities: Iterable[E])(using\n          con: DbCon\n      ): BatchUpdateResult =\n        handleQuery(updateSql, entities):\n          Using(con.connection.prepareStatement(updateSql)): ps =>\n            for entity <- entities do\n              val entityValues: Vector[Any] = entity\n                .asInstanceOf[Product]\n                .productIterator\n                .toVector\n              // put ID at the end\n              val updateValues = entityValues\n                .patch(idIndex, Vector.empty, 1)\n                .appended(entityValues(idIndex))\n\n              var pos = 1\n              for (field, codec) <- updateValues.lazyZip(updateCodecs) do\n                codec.writeSingle(field, ps, pos)\n                pos += codec.cols.length\n              ps.addBatch()\n\n            timed(batchUpdateResult(ps.executeBatch()))\n    end new\n  end buildRepoDefaults\nend SqliteDbType\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/Table.scala",
    "content": "package com.augustnagro.magnum\n\nimport scala.annotation.StaticAnnotation\n\nclass Table(\n    val dbType: DbType,\n    val nameMapper: SqlNameMapper = SqlNameMapper.SameCase\n) extends StaticAnnotation\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/TableExprs.scala",
    "content": "package com.augustnagro.magnum\n\nimport scala.quoted.*\n\nprivate case class TableExprs(\n    tableAnnot: Expr[Table],\n    tableNameScala: Expr[String],\n    tableNameSql: Expr[String],\n    eElemNames: Seq[String],\n    eElemNamesSql: Seq[Expr[String]],\n    ecElemNames: List[String],\n    ecElemNamesSql: Seq[Expr[String]],\n    idIndex: Expr[Int]\n)\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/TableInfo.scala",
    "content": "package com.augustnagro.magnum\n\nimport scala.deriving.*\nimport scala.compiletime.*\nimport scala.quoted.*\n\n/** Metadata about a Table, which can be interpolated in sql\"\" expressions\n  *\n  * For example,\n  *\n  * {{{\n  *   @Table(PostgresDbType, SqlNameMapper.CamelToSnakeCase)\n  *   case class User(@Id id: Long, firstName: String)\n  *     derives DbCodec\n  *\n  *   val u = TableInfo[User, User, Long].alias(\"u\")\n  *\n  *   sql\"SELECT ${u.firstName} FROM $u\".sqlString ==\n  *     \"SELECT u.first_name FROM user u\"\n  * }}}\n  */\nclass TableInfo[EC, E, ID](\n    val all: ColumnNames,\n    val insertColumns: ColumnNames,\n    val alias: Option[String],\n    val queryRepr: String,\n    val idColumn: Option[ColumnName],\n    private[magnum] val table: String,\n    private[magnum] val eClassName: String\n) extends Selectable, SqlLiteral:\n\n  def selectDynamic(scalaName: String): ColumnName =\n    all.columnNames.find(_.scalaName == scalaName).get\n\n  def alias(tableAlias: String): this.type =\n    require(tableAlias.nonEmpty, \"custom tableAlias cannot be empty\")\n    val queryRepr = table + \" \" + tableAlias\n\n    val allSchemaNames = all.columnNames.map(cn =>\n      val sqlName = cn.sqlName\n      ColumnName(\n        scalaName = cn.scalaName,\n        sqlName = sqlName,\n        queryRepr = tableAlias + \".\" + sqlName\n      )\n    )\n    val allQueryRepr = allSchemaNames.map(_.queryRepr).mkString(\", \")\n    val allCols = ColumnNames(allQueryRepr, allSchemaNames)\n    val newIdColumn = idColumn.flatMap(oldId =>\n      allSchemaNames.find(_.scalaName == oldId.scalaName)\n    )\n\n    new TableInfo[EC, E, ID](\n      all = allCols,\n      insertColumns = insertColumns,\n      alias = Some(tableAlias),\n      queryRepr = queryRepr,\n      idColumn = newIdColumn,\n      table = table,\n      eClassName = eClassName\n    ).asInstanceOf[this.type]\n  end alias\n\nend TableInfo\n\nobject TableInfo:\n  transparent inline def 
apply[EC: Mirror.Of, E: Mirror.Of, ID] =\n    ${ dbSchemaImpl[EC, E, ID] }\n\n  private def dbSchemaImpl[EC: Type, E: Type, ID: Type](using\n      Quotes\n  ): Expr[Any] =\n    import quotes.reflect.*\n    val exprs = tableExprs[EC, E, ID]\n    val refinement = exprs.eElemNames\n      .foldLeft(TypeRepr.of[TableInfo[EC, E, ID]])((typeRepr, elemName) =>\n        Refinement(typeRepr, elemName, TypeRepr.of[ColumnName])\n      )\n\n    val allColumnsExpr = Expr.ofSeq(\n      exprs.eElemNames\n        .lazyZip(exprs.eElemNamesSql)\n        .map((elemName, elemNameSqlExpr) =>\n          '{\n            val elemNameSql = $elemNameSqlExpr\n            ColumnName(${ Expr(elemName) }, elemNameSql, elemNameSql)\n          }\n        )\n    )\n\n    val insertColumnsExpr = Expr.ofSeq(\n      exprs.ecElemNames\n        .lazyZip(exprs.ecElemNamesSql)\n        .map((elemName, elemNameSqlExpr) =>\n          '{\n            val elemNameSql = $elemNameSqlExpr\n            ColumnName(${ Expr(elemName) }, elemNameSql, elemNameSql)\n          }\n        )\n    )\n\n    val idIdx =\n      if TypeRepr.of[ID] =:= TypeRepr.of[Null] then '{ None }\n      else '{ Some(${ exprs.idIndex }) }\n\n    refinement.asType match\n      case '[tpe] =>\n        '{\n          val allColumns = IArray.from($allColumnsExpr)\n          val allQueryRepr = allColumns.map(_.queryRepr).mkString(\", \")\n          val allCols = ColumnNames(allQueryRepr, allColumns)\n\n          val insertColumns = IArray.from($insertColumnsExpr)\n          val insertQueryRepr =\n            insertColumns.map(_.queryRepr).mkString(\"(\", \", \", \")\")\n          val insertCols = ColumnNames(insertQueryRepr, insertColumns)\n          val idColumn = $idIdx.map(idx => allColumns(idx))\n\n          val tableName = ${ exprs.tableNameSql }\n          new TableInfo[EC, E, ID](\n            all = allCols,\n            insertColumns = insertCols,\n            alias = None,\n            table = tableName,\n            queryRepr = 
tableName,\n            idColumn = idColumn,\n            eClassName = ${ exprs.tableNameScala }\n          ).asInstanceOf[tpe]\n        }\n    end match\n  end dbSchemaImpl\nend TableInfo\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/Transactor.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.Connection\nimport javax.sql.DataSource\nimport scala.util.Using\n\nclass Transactor private (\n    dataSource: DataSource,\n    sqlLogger: SqlLogger = SqlLogger.Default,\n    connectionConfig: Connection => Unit = con => ()\n):\n  def withSqlLogger(sqlLogger: SqlLogger): Transactor =\n    new Transactor(dataSource, sqlLogger, connectionConfig)\n\n  def withConnectionConfig(connectionConfig: Connection => Unit): Transactor =\n    new Transactor(dataSource, sqlLogger, connectionConfig)\n\n  def connect[T](f: DbCon ?=> T): T =\n    Using.resource(dataSource.getConnection): con =>\n      connectionConfig(con)\n      f(using DbCon(con, sqlLogger))\n\n  def transact[T](f: DbTx ?=> T): T =\n    Using.resource(dataSource.getConnection): con =>\n      connectionConfig(con)\n      con.setAutoCommit(false)\n      try\n        val res = f(using DbTx(con, sqlLogger))\n        con.commit()\n        res\n      catch\n        case t =>\n          try con.rollback()\n          catch { case t2 => t.addSuppressed(t2) }\n          throw t\nend Transactor\n\nobject Transactor:\n\n  def apply(\n      dataSource: DataSource,\n      sqlLogger: SqlLogger,\n      connectionConfig: Connection => Unit\n  ): Transactor =\n    new Transactor(dataSource, sqlLogger, connectionConfig)\n\n  def apply(dataSource: DataSource, sqlLogger: SqlLogger): Transactor =\n    new Transactor(dataSource, sqlLogger, _ => ())\n\n  def apply(\n      dataSource: DataSource,\n      connectionConfig: Connection => Unit\n  ): Transactor =\n    new Transactor(dataSource, SqlLogger.Default, connectionConfig)\n\n  def apply(dataSource: DataSource): Transactor =\n    new Transactor(dataSource, SqlLogger.Default, _ => ())\n\nend Transactor\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/UUIDCodec.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.sql.{PreparedStatement, ResultSet, Types}\nimport java.util.UUID\n\nobject UUIDCodec:\n  given VarCharUUIDCodec: DbCodec[UUID] with\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = IArray(Types.VARCHAR)\n    def readSingle(rs: ResultSet, pos: Int): UUID =\n      UUID.fromString(rs.getString(pos))\n    def readSingleOption(rs: ResultSet, pos: Int): Option[UUID] =\n      Option(rs.getString(pos)).map(UUID.fromString)\n    def writeSingle(entity: UUID, ps: PreparedStatement, pos: Int): Unit =\n      ps.setString(pos, entity.toString)\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/Update.scala",
    "content": "package com.augustnagro.magnum\n\nimport java.util.concurrent.TimeUnit\nimport scala.concurrent.duration.FiniteDuration\nimport scala.util.{Failure, Success, Using}\n\nclass Update private[magnum] (val frag: Frag):\n  /** Exactly like [[java.sql.PreparedStatement]].executeUpdate */\n  def run()(using con: DbCon): Int =\n    handleQuery(frag.sqlString, frag.params):\n      Using(con.connection.prepareStatement(frag.sqlString)): ps =>\n        frag.writer.write(ps, 1)\n        timed(ps.executeUpdate())\n"
  },
  {
    "path": "magnum/src/main/scala/com/augustnagro/magnum/util.scala",
    "content": "package com.augustnagro.magnum\n\nimport com.augustnagro.magnum.SqlException\n\nimport java.lang.System.Logger.Level\nimport java.sql.{Connection, PreparedStatement, ResultSet, Statement}\nimport java.util.StringJoiner\nimport java.util.concurrent.TimeUnit\nimport javax.sql.DataSource\nimport scala.collection.mutable as m\nimport scala.util.{Failure, Success, Try, Using, boundary}\nimport scala.deriving.Mirror\nimport scala.compiletime.{\n  constValue,\n  constValueTuple,\n  erasedValue,\n  error,\n  summonInline\n}\nimport scala.compiletime.ops.any.==\nimport scala.compiletime.ops.boolean.&&\nimport scala.concurrent.duration.FiniteDuration\nimport scala.reflect.ClassTag\nimport scala.quoted.*\n\ndef connect[T](transactor: Transactor)(f: DbCon ?=> T): T =\n  transactor.connect(f)\n\ndef connect[T](dataSource: DataSource)(f: DbCon ?=> T): T =\n  Transactor(dataSource).connect(f)\n\ndef transact[T](transactor: Transactor)(f: DbTx ?=> T): T =\n  transactor.transact(f)\n\ndef transact[T](dataSource: DataSource)(f: DbTx ?=> T): T =\n  Transactor(dataSource).transact(f)\n\ndef transact[T](dataSource: DataSource, connectionConfig: Connection => Unit)(\n    f: DbTx ?=> T\n): T =\n  val transactor =\n    Transactor(dataSource = dataSource, connectionConfig = connectionConfig)\n  transactor.transact(f)\n\nextension (inline sc: StringContext)\n  inline def sql(inline args: Any*): Frag =\n    ${ sqlImpl('{ sc }, '{ args }) }\n\nprivate def sqlImpl(sc: Expr[StringContext], args: Expr[Seq[Any]])(using\n    Quotes\n): Expr[Frag] =\n  import quotes.reflect.*\n  val allArgsExprs: Seq[Expr[Any]] = args match\n    case Varargs(ae) => ae\n//  val stringExprs: Seq[Expr[String]] = sc match\n//    case '{ StringContext(${ Varargs(strings) }: _*) } => strings\n\n  '{\n    val args: Seq[Any] = ${ Expr.ofSeq(allArgsExprs) }\n\n    val sqlQueryReprs: Vector[String] = ${\n      queryReprs(allArgsExprs, '{ args }, '{ Vector.newBuilder })\n    }\n    val queryExpr: String = 
$sc.s(sqlQueryReprs: _*)\n\n    val flattenedArgs: Vector[Any] = ${\n      flattenedArgsExpr(allArgsExprs, '{ args }, '{ Vector.newBuilder })\n    }\n\n    val writer: FragWriter = (ps: PreparedStatement, pos: Int) => {\n      ${ sqlWriter('{ ps }, '{ pos }, '{ args }, allArgsExprs) }\n    }\n    Frag(queryExpr, flattenedArgs, writer)\n  }\nend sqlImpl\n\nprivate def flattenedArgsExpr(\n    argsExprs: Seq[Expr[Any]],\n    allArgs: Expr[Seq[Any]],\n    builder: Expr[m.Builder[Any, Vector[Any]]],\n    i: Int = 0\n)(using Quotes): Expr[Vector[Any]] =\n  argsExprs match\n    case '{ $arg: SqlLiteral } +: tail =>\n      flattenedArgsExpr(tail, allArgs, builder, i + 1)\n    case '{ $arg: Frag } +: tail =>\n      val newBuilder = '{\n        $builder ++= $allArgs(${ Expr(i) }).asInstanceOf[Frag].params\n      }\n      flattenedArgsExpr(tail, allArgs, newBuilder, i + 1)\n    case '{ $arg: tp } +: tail =>\n      val newBuilder = '{ $builder += $allArgs(${ Expr(i) }) }\n      flattenedArgsExpr(tail, allArgs, newBuilder, i + 1)\n    case Seq() =>\n      '{ $builder.result() }\n\nprivate def queryReprs(\n    argsExprs: Seq[Expr[Any]],\n    allArgs: Expr[Seq[Any]],\n    builder: Expr[m.Builder[String, Vector[String]]],\n    i: Int = 0\n)(using Quotes): Expr[Vector[String]] =\n  argsExprs match\n    case '{ $arg: SqlLiteral } +: tail =>\n      val newBuilder = '{\n        $builder += $allArgs(${ Expr(i) }).asInstanceOf[SqlLiteral].queryRepr\n      }\n      queryReprs(tail, allArgs, newBuilder, i + 1)\n    case '{ $arg: Frag } +: tail =>\n      val newBuilder = '{\n        $builder += $allArgs(${ Expr(i) }).asInstanceOf[Frag].sqlString\n      }\n      queryReprs(tail, allArgs, newBuilder, i + 1)\n    case '{ $arg: tp } +: tail =>\n      val codecExpr = summonWriter[tp]\n      val newBuilder = '{ $builder += $codecExpr.queryRepr }\n      queryReprs(tail, allArgs, newBuilder, i + 1)\n    case Seq() =>\n      '{ $builder.result() }\n\nprivate def sqlWriter(\n    psExpr: 
Expr[PreparedStatement],\n    posExpr: Expr[Int],\n    args: Expr[Seq[Any]],\n    argsExprs: Seq[Expr[Any]],\n    i: Int = 0\n)(using Quotes): Expr[Int] =\n  import quotes.reflect.*\n  argsExprs match\n    case '{ $arg: SqlLiteral } +: tail =>\n      sqlWriter(psExpr, posExpr, args, tail, i + 1)\n    case '{ $arg: Frag } +: tail =>\n      '{\n        val frag = $args(${ Expr(i) }).asInstanceOf[Frag]\n        val pos = $posExpr\n        val newPos = frag.writer.write($psExpr, pos)\n        ${ sqlWriter(psExpr, '{ newPos }, args, tail, i + 1) }\n      }\n    case '{ $arg: tp } +: tail =>\n      val codecExpr = summonWriter[tp]\n      '{\n        val argValue = $args(${ Expr(i) }).asInstanceOf[tp]\n        val pos = $posExpr\n        val codec = $codecExpr\n        codec.writeSingle(argValue, $psExpr, pos)\n        val newPos = pos + codec.cols.length\n        ${ sqlWriter(psExpr, '{ newPos }, args, tail, i + 1) }\n      }\n    case Seq() => posExpr\n  end match\nend sqlWriter\n\nprivate def summonWriter[T: Type](using Quotes): Expr[DbCodec[T]] =\n  import quotes.reflect.*\n\n  Expr\n    .summon[DbCodec[T]]\n    .orElse(\n      TypeRepr.of[T].widen.asType match\n        case '[tpe] =>\n          Expr\n            .summon[DbCodec[tpe]]\n            .map(codec => '{ $codec.asInstanceOf[DbCodec[T]] })\n    )\n    .getOrElse:\n      report.info(\n        s\"Could not find given DbCodec for ${TypeRepr.of[T].show}. 
Using PreparedStatement::setObject instead.\"\n      )\n      '{ DbCodec.AnyCodec.asInstanceOf[DbCodec[T]] }\n\ndef batchUpdate[T](values: Iterable[T])(f: T => Update)(using\n    con: DbCon\n): BatchUpdateResult =\n  val it = values.iterator\n  if !it.hasNext then return BatchUpdateResult.Success(0)\n  val firstUpdate = f(it.next())\n  val firstFrag = firstUpdate.frag\n\n  Using.Manager(use =>\n    val ps = use(con.connection.prepareStatement(firstFrag.sqlString))\n    firstFrag.writer.write(ps, 1)\n    ps.addBatch()\n\n    while it.hasNext do\n      val frag = f(it.next()).frag\n      assert(\n        frag.sqlString == firstFrag.sqlString,\n        \"all queries must be the same for batch PreparedStatement\"\n      )\n      frag.writer.write(ps, 1)\n      ps.addBatch()\n    batchUpdateResult(ps.executeBatch())\n  ) match\n    case Success(res) => res\n    case Failure(t) =>\n      throw SqlException(\n        con.sqlLogger.exceptionMsg(\n          SqlExceptionEvent(firstFrag.sqlString, firstFrag.params, t)\n        ),\n        t\n      )\n  end match\nend batchUpdate\n\nprivate val Log = System.getLogger(\"com.augustnagro.magnum\")\n\nprivate def parseParams(params: Any): Iterator[Iterator[Any]] =\n  params match\n    case p: Product => Iterator(p.productIterator)\n    case it: Iterable[?] 
=>\n      it.headOption match\n        case Some(h: Product) =>\n          it.asInstanceOf[Iterable[Product]]\n            .iterator\n            .map(_.productIterator)\n        case _ =>\n          Iterator(it.iterator)\n    case x => Iterator(Iterator(x))\n\nprivate def paramsString(params: Iterator[Iterator[Any]]): String =\n  params.map(_.mkString(\"(\", \", \", \")\")).mkString(\"\", \",\\n\", \"\\n\")\n\nprivate def timed[T](f: => T): (T, FiniteDuration) =\n  val start = System.currentTimeMillis()\n  val res = f\n  val execTime = FiniteDuration(\n    System.currentTimeMillis() - start,\n    TimeUnit.MILLISECONDS\n  )\n  (res, execTime)\n\nprivate def batchUpdateResult(updateCounts: Array[Int]): BatchUpdateResult =\n  boundary:\n    val updatedRows = updateCounts.foldLeft(0L)((res, c) =>\n      c match\n        case rowCount if rowCount >= 0 =>\n          res + rowCount\n        case Statement.SUCCESS_NO_INFO =>\n          boundary.break(BatchUpdateResult.SuccessNoInfo)\n        case errorCode =>\n          throw RuntimeException(s\"Received JDBC error code $errorCode\")\n    )\n    BatchUpdateResult.Success(updatedRows)\n\nprivate def assertECIsSubsetOfE[EC: Type, E: Type](using Quotes): Unit =\n  import quotes.reflect.*\n  val eRepr = TypeRepr.of[E]\n  val ecRepr = TypeRepr.of[EC]\n  val eFields = eRepr.typeSymbol.caseFields\n  val ecFields = ecRepr.typeSymbol.caseFields\n\n  for ecField <- ecFields do\n    if !eFields.exists(f =>\n        f.name == ecField.name &&\n          f.signature.resultSig == ecField.signature.resultSig\n      )\n    then\n      report.error(\n        s\"\"\"${ecRepr.show} must be an effective subset of ${eRepr.show}.\n           |Are there any fields on ${ecRepr.show} you forgot to update on ${eRepr.show}?\n           |\"\"\".stripMargin\n      )\n\nprivate def tableExprs[EC: Type, E: Type, ID: Type](using\n    Quotes\n): TableExprs =\n  import quotes.reflect.*\n  assertECIsSubsetOfE[EC, E]\n\n  val idIndex = idAnnotIndex[E]\n  val 
table: Expr[Table] =\n    DerivingUtil.tableAnnot[E] match\n      case Some(table) => table\n      case None =>\n        report.errorAndAbort(\n          s\"${TypeRepr.of[E].show} must have @Table annotation\"\n        )\n  val nameMapper: Expr[SqlNameMapper] = '{ $table.nameMapper }\n\n  Expr.summon[Mirror.Of[E]] match\n    case Some('{\n          $eMirror: Mirror.Of[E] {\n            type MirroredLabel = eLabel\n            type MirroredElemLabels = eMels\n          }\n        }) =>\n      Expr.summon[Mirror.Of[EC]] match\n        case Some('{\n              $ecMirror: Mirror.Of[EC] {\n                type MirroredElemLabels = ecMels\n              }\n            }) =>\n          val tableNameScala = Type.valueOfConstant[eLabel].get.toString\n          val tableNameScalaExpr = Expr(tableNameScala)\n          val tableNameSql = DerivingUtil.sqlTableNameAnnot[E] match\n            case Some(sqlName) => '{ $sqlName.name }\n            case None => '{ $nameMapper.toTableName($tableNameScalaExpr) }\n          val eElemNames = elemNames[eMels]()\n          val eElemNamesSql = eElemNames.map(elemName =>\n            sqlNameAnnot[E](elemName) match\n              case Some(sqlName) => '{ $sqlName.name }\n              case None =>\n                '{ $nameMapper.toColumnName(${ Expr(elemName) }) }\n          )\n          val ecElemNames = elemNames[ecMels]()\n          val ecElemNamesSql = ecElemNames.map(elemName =>\n            sqlNameAnnot[E](elemName) match\n              case Some(sqlName) => '{ $sqlName.name }\n              case None =>\n                '{ $nameMapper.toColumnName(${ Expr(elemName) }) }\n          )\n          TableExprs(\n            table,\n            tableNameScalaExpr,\n            tableNameSql,\n            eElemNames,\n            eElemNamesSql,\n            ecElemNames,\n            ecElemNamesSql,\n            idIndex\n          )\n        case _ =>\n          report.errorAndAbort(\n            s\"A Mirror is required to derive 
RepoDefaults for ${TypeRepr.of[EC].show}\"\n          )\n    case _ =>\n      report.errorAndAbort(\n        s\"A Mirror is required to derive RepoDefaults for ${TypeRepr.of[E].show}\"\n      )\n  end match\nend tableExprs\n\nprivate def idAnnotIndex[E: Type](using q: Quotes): Expr[Int] =\n  import q.reflect.*\n  val idAnnot = TypeRepr.of[Id].typeSymbol\n  val index = TypeRepr\n    .of[E]\n    .typeSymbol\n    .primaryConstructor\n    .paramSymss\n    .head\n    .indexWhere(sym => sym.hasAnnotation(idAnnot)) match\n    case -1 => 0\n    case x  => x\n  Expr(index)\n\nprivate def elemNames[Mels: Type](res: List[String] = Nil)(using\n    Quotes\n): List[String] =\n  import quotes.reflect.*\n  Type.of[Mels] match\n    case '[mel *: melTail] =>\n      val melString = Type.valueOfConstant[mel].get.toString\n      elemNames[melTail](melString :: res)\n    case '[EmptyTuple] =>\n      res.reverse\n\nprivate def sqlNameAnnot[T: Type](elemName: String)(using\n    Quotes\n): Option[Expr[SqlName]] =\n  import quotes.reflect.*\n  val annot = TypeRepr.of[SqlName].typeSymbol\n  TypeRepr\n    .of[T]\n    .typeSymbol\n    .primaryConstructor\n    .paramSymss\n    .head\n    .find(sym => sym.name == elemName && sym.hasAnnotation(annot))\n    .flatMap(sym => sym.getAnnotation(annot))\n    .map(term => term.asExprOf[SqlName])\n\nprivate def handleQuery[A](sql: String, params: Any)(\n    attempt: Try[(A, FiniteDuration)]\n)(using con: DbCon): A =\n  attempt match\n    case Success((res, execTime)) =>\n      con.sqlLogger.log(SqlSuccessEvent(sql, params, execTime))\n      res\n    case Failure(t) =>\n      val msg = con.sqlLogger.exceptionMsg(SqlExceptionEvent(sql, params, t))\n      throw SqlException(msg, t)\n"
  },
  {
    "path": "magnum/src/test/resources/clickhouse/big-dec.sql",
    "content": "drop table if exists big_dec;\n\ncreate table big_dec (\n    id Int64 NOT NULL,\n    my_big_dec Nullable(Int256)\n)\nENGINE = MergeTree()\nORDER BY id;\n\ninsert into big_dec values\n(1, 123),\n(2, null);"
  },
  {
    "path": "magnum/src/test/resources/clickhouse/car.sql",
    "content": "drop table if exists car;\n\nCREATE TABLE car (\n    model String NOT NULL,\n    id Int64 NOT NULL,\n    top_speed Int32 NOT NULL,\n    vin Nullable(Int32),\n    color Enum('Red', 'Green', 'Blue'),\n    created DateTime NOT NULL\n)\nENGINE = MergeTree()\nORDER BY created;\n\nINSERT INTO car (model, id, top_speed, vin, color, created) VALUES\n('McLaren Senna', 1, 208, 123, 'Red', toDateTime('2024-11-24 22:17:30', 'UTC')),\n('Ferrari F8 Tributo', 2, 212, 124, 'Green', toDateTime('2024-11-24 22:17:31', 'UTC')),\n('Aston Martin Superleggera', 3, 211, null, 'Blue', toDateTime('2024-11-24 22:17:32', 'UTC'));"
  },
  {
    "path": "magnum/src/test/resources/clickhouse/my-time.sql",
    "content": "drop table if exists my_time;\n\ncreate table my_time (\n  a DateTime not null,\n  b Date not null,\n  c String not null,\n  d DateTime not null\n)\nengine = MergeTree()\norder by a;\n\ninsert into my_time values\n(toDateTime('2025-03-30 21:19:23'), toDate('2025-03-30'), '05:20:04', toDateTime('2025-04-02 20:16:38')),\n(toDateTime('2025-03-31 21:19:23'), toDate('2025-03-31'), '05:30:04', toDateTime('2025-04-02T20:17:38'));\n"
  },
  {
    "path": "magnum/src/test/resources/clickhouse/no-id.sql",
    "content": "drop table if exists no_id;\n\nCREATE TABLE no_id (\n    created_at DateTime NOT NULL,\n    user_name String NOT NULL,\n    user_action String NOT NULL\n)\nENGINE = MergeTree()\nORDER BY created_at;\n\nINSERT INTO no_id VALUES\n(timestamp '1997-08-15', 'Josh', 'clicked a button'),\n(timestamp '1997-08-16', 'Danny', 'opened a toaster'),\n(timestamp '1997-08-17', 'Greg', 'ran some QA tests');"
  },
  {
    "path": "magnum/src/test/resources/clickhouse/person.sql",
    "content": "drop table if exists person;\n\ncreate table person (\n    id Int64 not null,\n    first_name Nullable(String),\n    last_name String not null,\n    is_admin Bool not null,\n    created DateTime not null,\n    social_id Nullable(UUID)\n)\nengine = MergeTree()\norder by created;\n\ninsert into person values\n(1, 'George', 'Washington', true, toDateTime('2023-03-05 02:26:00'), toUUID('d06443a6-3efb-46c4-a66a-a80a8a9a5388')),\n(2, 'Alexander', 'Hamilton', true, toDateTime('2023-03-05 02:27:00'), toUUID('529b6c6d-7228-4da5-81d7-13b706f78ddb')),\n(3, 'John', 'Adams', true, toDateTime('2023-03-05 02:28:00'), null),\n(4, 'Benjamin', 'Franklin', true, toDateTime('2023-03-05 02:29:00'), null),\n(5, 'John', 'Jay', true, toDateTime('2023-03-05 02:30:00'), null),\n(6, 'Thomas', 'Jefferson', true, toDateTime('2023-03-05 02:31:00'), null),\n(7, 'James', 'Madison', true, toDateTime('2023-03-05 02:32:00'), null),\n(8, null, 'Nagro', false, toDateTime('2023-03-05 02:33:00'), null);"
  },
  {
    "path": "magnum/src/test/resources/h2/big-dec.sql",
    "content": "drop table if exists big_dec cascade;\n\ncreate table big_dec (\n    id int auto_increment primary key,\n    my_big_dec numeric\n);\n\ninsert into big_dec values\n(1, 123),\n(2, null);"
  },
  {
    "path": "magnum/src/test/resources/h2/car.sql",
    "content": "drop table if exists car;\n\ncreate table car (\n    model varchar(50) not null,\n    id bigint auto_increment primary key,\n    top_speed int not null,\n    vin int,\n    color enum('Red', 'Green', 'Blue'),\n    created timestamp with time zone not null\n);\n\ninsert into car (model, top_speed, vin, color, created) values\n('McLaren Senna', 208, 123, 'Red', '2024-11-24T22:17:30.000000000Z'),\n('Ferrari F8 Tributo', 212, 124, 'Green', '2024-11-24T22:17:31.000000000Z'),\n('Aston Martin Superleggera', 211, null, 'Blue', '2024-11-24T22:17:32.000000000Z');\n"
  },
  {
    "path": "magnum/src/test/resources/h2/my-time.sql",
    "content": "drop table if exists my_time cascade;\n\ncreate table my_time (\n    a timestamp with time zone not null,\n    b date not null,\n    c time not null,\n    d timestamp not null\n);\n\ninsert into my_time values\n('2025-03-30T21:19:23Z', '2025-03-30', '05:20:04', '2025-04-02T20:16:38'),\n('2025-03-31T21:19:23Z', '2025-03-31', '05:30:04', '2025-04-02T20:17:38');\n"
  },
  {
    "path": "magnum/src/test/resources/h2/my-user.sql",
    "content": "drop table if exists my_user cascade;\n\ncreate table my_user (\n    first_name text not null,\n    id bigint auto_increment primary key\n);\n\ninsert into my_user (first_name) values\n('George'),\n('Alexander'),\n('John');\n"
  },
  {
    "path": "magnum/src/test/resources/h2/no-id.sql",
    "content": "drop table if exists no_id;\n\ncreate table no_id (\n    created_at timestamp with time zone default now() not null,\n    user_name varchar not null,\n    user_action varchar not null\n);\n\ninsert into no_id values\n(timestamp '1997-08-15', 'Josh', 'clicked a button'),\n(timestamp '1997-08-16', 'Danny', 'opened a toaster'),\n(timestamp '1997-08-17', 'Greg', 'ran some QA tests');\n"
  },
  {
    "path": "magnum/src/test/resources/h2/person.sql",
    "content": "drop table if exists person cascade;\n\ncreate table person (\n    id bigint primary key,\n    first_name varchar(50),\n    last_name varchar(50) not null,\n    is_admin boolean not null,\n    created timestamp with time zone,\n    social_id UUID\n);\n\ninsert into person (id, first_name, last_name, is_admin, created, social_id) values\n(1, 'George', 'Washington', true, now(), 'd06443a6-3efb-46c4-a66a-a80a8a9a5388'),\n(2, 'Alexander', 'Hamilton', true, now(), '529b6c6d-7228-4da5-81d7-13b706f78ddb'),\n(3, 'John', 'Adams', true, now(), null),\n(4, 'Benjamin', 'Franklin', true, now(), null),\n(5, 'John', 'Jay', true, now(), null),\n(6, 'Thomas', 'Jefferson', true, now(), null),\n(7, 'James', 'Madison', true, now(), null),\n(8, null, 'Nagro', false, now(), null);\n"
  },
  {
    "path": "magnum/src/test/resources/mysql/big-dec.sql",
    "content": "drop table if exists big_dec cascade;\n\ncreate table big_dec (\n    id int primary key,\n    my_big_dec numeric\n);\n\ninsert into big_dec values\n(1, 123),\n(2, null);"
  },
  {
    "path": "magnum/src/test/resources/mysql/car.sql",
    "content": "drop table if exists car;\n\ncreate table car (\n    model varchar(50) not null,\n    id bigint primary key,\n    top_speed int not null,\n    vin int,\n    color enum('Red', 'Green', 'Blue'),\n    created datetime not null\n);\n\ninsert into car (model, id, top_speed, vin, color, created) values\n('McLaren Senna', 1, 208, 123, 'Red', '2024-11-24 22:17:30'),\n('Ferrari F8 Tributo', 2, 212, 124, 'Green', '2024-11-24 22:17:31'),\n('Aston Martin Superleggera', 3, 211, null, 'Blue', '2024-11-24 22:17:32');\n"
  },
  {
    "path": "magnum/src/test/resources/mysql/my-time.sql",
    "content": "drop table if exists my_time cascade;\n\ncreate table my_time (\n  a timestamp not null,\n  b date not null,\n  c time not null,\n  d datetime not null\n);\n\ninsert into my_time values\n('2025-03-30 21:19:23', '2025-03-30', '05:20:04', '2025-04-02 20:16:38'),\n('2025-03-31 21:19:23', '2025-03-31', '05:30:04', '2025-04-02T20:17:38');"
  },
  {
    "path": "magnum/src/test/resources/mysql/my-user.sql",
    "content": "drop table if exists my_user cascade;\n\ncreate table my_user (\n    first_name varchar(200) not null,\n    id bigint auto_increment primary key\n);\n\ninsert into my_user (first_name) values\n('George'),\n('Alexander'),\n('John');\n"
  },
  {
    "path": "magnum/src/test/resources/mysql/no-id.sql",
    "content": "drop table if exists no_id;\n\ncreate table no_id (\n    created_at datetime not null default now(),\n    user_name varchar(200) not null,\n    user_action varchar(200) not null\n);\n\ninsert into no_id values\n('1997-08-15', 'Josh', 'clicked a button'),\n('1997-08-16', 'Danny', 'opened a toaster'),\n('1997-08-17', 'Greg', 'ran some QA tests');\n"
  },
  {
    "path": "magnum/src/test/resources/mysql/person.sql",
    "content": "drop table if exists person cascade;\n\ncreate table person (\n    id bigint primary key,\n    first_name varchar(50),\n    last_name varchar(50) not null,\n    is_admin boolean not null,\n    created datetime not null,\n    social_id varchar(36)\n);\n\ninsert into person (id, first_name, last_name, is_admin, created, social_id) values\n(1, 'George', 'Washington', true, now(), 'd06443a6-3efb-46c4-a66a-a80a8a9a5388'),\n(2, 'Alexander', 'Hamilton', true, now(), '529b6c6d-7228-4da5-81d7-13b706f78ddb'),\n(3, 'John', 'Adams', true, now(), null),\n(4, 'Benjamin', 'Franklin', true, now(), null),\n(5, 'John', 'Jay', true, now(), null),\n(6, 'Thomas', 'Jefferson', true, now(), null),\n(7, 'James', 'Madison', true, now(), null),\n(8, null, 'Nagro', false, now(), null);\n"
  },
  {
    "path": "magnum/src/test/resources/pg/big-dec.sql",
    "content": "drop table if exists big_dec cascade;\n\ncreate table big_dec (\n    id int primary key,\n    my_big_dec numeric\n);\n\ninsert into big_dec values\n(1, 123),\n(2, null);"
  },
  {
    "path": "magnum/src/test/resources/pg/car.sql",
    "content": "DROP TABLE IF EXISTS car;\n\nCREATE TABLE car (\n    model VARCHAR(50) NOT NULL,\n    id bigint PRIMARY KEY,\n    top_speed INT NOT NULL,\n    vin INT,\n    color TEXT NOT NULL CHECK (color IN ('Red', 'Green', 'Blue')),\n    created TIMESTAMP WITH TIME ZONE NOT NULL\n);\n\nINSERT INTO car (model, id, top_speed, vin, color, created) VALUES\n('McLaren Senna', 1, 208, 123, 'Red', '2024-11-24T22:17:30.000000000Z'::timestamptz),\n('Ferrari F8 Tributo', 2, 212, 124, 'Green', '2024-11-24T22:17:31.000000000Z'::timestamptz),\n('Aston Martin Superleggera', 3, 211, null, 'Blue', '2024-11-24T22:17:32.000000000Z'::timestamptz);\n"
  },
  {
    "path": "magnum/src/test/resources/pg/my-time.sql",
    "content": "drop table if exists my_time cascade;\n\ncreate table my_time (\n  a timestamptz not null,\n  b date not null,\n  c time not null,\n  d timestamp not null\n);\n\ninsert into my_time values\n('2025-03-30T21:19:23Z', '2025-03-30', '05:20:04', '2025-04-02T20:16:38'),\n('2025-03-31T21:19:23Z', '2025-03-31', '05:30:04', '2025-04-02T20:17:38');"
  },
  {
    "path": "magnum/src/test/resources/pg/my-user.sql",
    "content": "drop table if exists my_user cascade;\n\ncreate table my_user (\n    first_name text not null,\n    id bigint primary key generated always as identity\n);\n\ninsert into my_user (first_name) values\n('George'),\n('Alexander'),\n('John');\n"
  },
  {
    "path": "magnum/src/test/resources/pg/no-id.sql",
    "content": "drop table if exists no_id;\n\ncreate table no_id (\n    created_at timestamptz not null default now(),\n    user_name text not null,\n    user_action text not null\n);\n\ninsert into no_id values\n(timestamp '1997-08-15', 'Josh', 'clicked a button'),\n(timestamp '1997-08-16', 'Danny', 'opened a toaster'),\n(timestamp '1997-08-17', 'Greg', 'ran some QA tests');\n"
  },
  {
    "path": "magnum/src/test/resources/pg/person.sql",
    "content": "drop table if exists person cascade;\n\ncreate table person (\n    id bigint primary key,\n    first_name varchar(50),\n    last_name varchar(50) not null,\n    is_admin boolean not null,\n    created timestamptz not null,\n    social_id UUID\n);\n\ninsert into person (id, first_name, last_name, is_admin, created, social_id) values\n(1, 'George', 'Washington', true, now(), 'd06443a6-3efb-46c4-a66a-a80a8a9a5388'),\n(2, 'Alexander', 'Hamilton', true, now(), '529b6c6d-7228-4da5-81d7-13b706f78ddb'),\n(3, 'John', 'Adams', true, now(), null),\n(4, 'Benjamin', 'Franklin', true, now(), null),\n(5, 'John', 'Jay', true, now(), null),\n(6, 'Thomas', 'Jefferson', true, now(), null),\n(7, 'James', 'Madison', true, now(), null),\n(8, null, 'Nagro', false, timestamp '1997-08-12', null);\n"
  },
  {
    "path": "magnum/src/test/scala/ClickHouseTests.scala",
    "content": "import com.augustnagro.magnum.*\nimport com.clickhouse.client.config.ClickHouseDefaults\nimport com.clickhouse.jdbc.ClickHouseDataSource\nimport com.dimafeng.testcontainers.ClickHouseContainer\nimport com.dimafeng.testcontainers.munit.fixtures.TestContainersFixtures\nimport munit.{AnyFixture, FunSuite, Location}\nimport org.testcontainers.utility.DockerImageName\nimport shared.*\n\nimport java.nio.file.{Files, Path}\nimport java.util.{Properties, UUID}\nimport scala.util.Using\n\nclass ClickHouseTests extends FunSuite, TestContainersFixtures:\n\n  sharedTests(this, ClickhouseDbType, xa)\n\n  test(\"only allows EC =:= E\"):\n    intercept[IllegalArgumentException]:\n      case class UserCreator(name: String) derives DbCodec\n      @Table(ClickhouseDbType)\n      case class User(id: UUID, name: String) derives DbCodec\n      val repo = Repo[UserCreator, User, UUID]\n\n  val clickHouseContainer = ForAllContainerFixture(\n    ClickHouseContainer\n      .Def(dockerImageName =\n        DockerImageName.parse(\"clickhouse/clickhouse-server:24.3.12.75\")\n      )\n      .createContainer()\n  )\n\n  override def munitFixtures: Seq[AnyFixture[_]] =\n    super.munitFixtures :+ clickHouseContainer\n\n  def xa(): Transactor =\n    val clickHouse = clickHouseContainer()\n    val props = Properties()\n    props.put(ClickHouseDefaults.USER.getKey, clickHouse.username)\n    props.put(ClickHouseDefaults.PASSWORD.getKey, clickHouse.password)\n    val ds = ClickHouseDataSource(clickHouse.jdbcUrl, props)\n    val tableDDLs = Vector(\n      \"clickhouse/car.sql\",\n      \"clickhouse/no-id.sql\",\n      \"clickhouse/person.sql\",\n      \"clickhouse/big-dec.sql\",\n      \"clickhouse/my-time.sql\"\n    ).map(p => Files.readString(Path.of(getClass.getResource(p).toURI)))\n    Using\n      .Manager(use =>\n        val con = use(ds.getConnection)\n        val stmt = use(con.createStatement)\n        for ddl <- tableDDLs do stmt.execute(ddl)\n      )\n      .get\n    Transactor(ds)\n  end xa\nend ClickHouseTests\n"
  },
  {
    "path": "magnum/src/test/scala/EffectiveSubsetTests.scala",
    "content": "import com.augustnagro.magnum.*\nimport munit.FunSuite\n\nclass EffectiveSubsetTests extends FunSuite:\n\n  test(\"DbSchema macro error if EC not an effective subset of E\"):\n    case class PersonCreator(first: String, last: String)\n    case class Person(id: Long, last: String) derives DbCodec\n    compileErrors(\"DbSchema[PersonCreator, Person, Long]\")\n\n  test(\"Repo macro error if EC not an effective subset of E\"):\n    case class PersonCreator(first: String, last: String)\n    case class Person(id: Long, last: String) derives DbCodec\n    compileErrors(\"Repo[PersonCreator, Person, Long]\")\n"
  },
  {
    "path": "magnum/src/test/scala/H2Tests.scala",
    "content": "import com.augustnagro.magnum.*\nimport munit.FunSuite\nimport org.h2.jdbcx.JdbcDataSource\nimport shared.*\n\nimport java.nio.file.{Files, Path}\nimport scala.util.Using\nimport scala.util.Using.Manager\n\nclass H2Tests extends FunSuite:\n\n  sharedTests(this, H2DbType, xa)\n\n  lazy val h2DbPath = Files.createTempDirectory(null).toAbsolutePath\n\n  def xa(): Transactor =\n    val ds = JdbcDataSource()\n    ds.setURL(\"jdbc:h2:\" + h2DbPath)\n    ds.setUser(\"sa\")\n    ds.setPassword(\"\")\n    val tableDDLs = Vector(\n      \"/h2/car.sql\",\n      \"/h2/person.sql\",\n      \"/h2/my-user.sql\",\n      \"/h2/no-id.sql\",\n      \"/h2/big-dec.sql\",\n      \"/h2/my-time.sql\"\n    ).map(p => Files.readString(Path.of(getClass.getResource(p).toURI)))\n    Manager(use =>\n      val con = use(ds.getConnection)\n      val stmt = use(con.createStatement)\n      for ddl <- tableDDLs do stmt.execute(ddl)\n    )\n    Transactor(ds)\n\nend H2Tests\n"
  },
  {
    "path": "magnum/src/test/scala/MySqlTests.scala",
    "content": "import com.augustnagro.magnum.*\nimport com.augustnagro.magnum.UUIDCodec.VarCharUUIDCodec\nimport com.dimafeng.testcontainers.MySQLContainer\nimport com.dimafeng.testcontainers.munit.fixtures.TestContainersFixtures\nimport com.mysql.cj.jdbc.MysqlDataSource\nimport munit.{AnyFixture, FunSuite, Location}\nimport org.testcontainers.utility.DockerImageName\nimport shared.*\n\nimport java.nio.file.{Files, Path}\nimport scala.util.Using\nimport scala.util.Using.Manager\n\nclass MySqlTests extends FunSuite, TestContainersFixtures:\n\n  sharedTests(this, MySqlDbType, xa)\n\n  val mySqlContainer = ForAllContainerFixture(\n    MySQLContainer\n      .Def(dockerImageName = DockerImageName.parse(\"mysql:8.0.32\"))\n      .createContainer()\n  )\n\n  override def munitFixtures: Seq[AnyFixture[_]] =\n    super.munitFixtures :+ mySqlContainer\n\n  def xa(): Transactor =\n    val mySql = mySqlContainer()\n    val ds = MysqlDataSource()\n    ds.setURL(mySql.jdbcUrl)\n    ds.setUser(mySql.username)\n    ds.setPassword(mySql.password)\n    ds.setAllowMultiQueries(true)\n    ds.setServerTimezone(\"UTC\")\n    val tableDDLs = Vector(\n      \"/mysql/car.sql\",\n      \"/mysql/person.sql\",\n      \"/mysql/my-user.sql\",\n      \"/mysql/no-id.sql\",\n      \"/mysql/big-dec.sql\",\n      \"/mysql/my-time.sql\"\n    ).map(p => Files.readString(Path.of(getClass.getResource(p).toURI)))\n    Manager(use =>\n      val con = use(ds.getConnection)\n      val stmt = use(con.createStatement())\n      for ddl <- tableDDLs do stmt.execute(ddl)\n    ).get\n    Transactor(ds)\n  end xa\nend MySqlTests\n"
  },
  {
    "path": "magnum/src/test/scala/OracleTests.scala",
    "content": "import com.augustnagro.magnum.*\nimport com.augustnagro.magnum.UUIDCodec.VarCharUUIDCodec\nimport com.dimafeng.testcontainers.OracleContainer\nimport com.dimafeng.testcontainers.munit.fixtures.TestContainersFixtures\nimport munit.{AnyFixture, FunSuite}\nimport oracle.jdbc.datasource.impl.OracleDataSource\nimport org.testcontainers.utility.DockerImageName\nimport shared.*\n\nimport java.sql.Statement\nimport java.time.LocalTime\nimport scala.util.Using\n\nclass OracleTests extends FunSuite, TestContainersFixtures:\n\n  given DbCodec[Boolean] =\n    DbCodec[String].biMap(_ == \"Y\", b => if b then \"Y\" else \"N\")\n\n  given DbCodec[LocalTime] =\n    DbCodec[String].biMap(LocalTime.parse, _.toString)\n\n  sharedTests(this, OracleDbType, xa)\n\n  val oracleContainer = ForAllContainerFixture(\n    OracleContainer\n      .Def(dockerImageName =\n        DockerImageName.parse(\n          \"gvenzl/oracle-xe:21.3.0\"\n        )\n      )\n      .createContainer()\n  )\n\n  override def munitFixtures: Seq[AnyFixture[_]] =\n    super.munitFixtures :+ oracleContainer\n\n  def xa(): Transactor =\n    val oracle = oracleContainer()\n    val ds = OracleDataSource()\n    ds.setURL(oracle.jdbcUrl)\n    ds.setUser(oracle.username)\n    ds.setPassword(oracle.password)\n    // oracle doesn't support drop if exists,\n    // or multi-statement queries\n    Using\n      .Manager(use =>\n        val con = use(ds.getConnection())\n        val stmt = use(con.createStatement())\n        try stmt.execute(\"drop table car\")\n        catch case _ => ()\n        stmt.execute(\n          \"\"\"create table car (\n          |  model varchar2(50) not null,\n          |  id number primary key,\n          |  top_speed number not null,\n          |  vin number,\n          |  color varchar2(50) not null check (color in ('Red', 'Green', 'Blue')),\n          |  created timestamp not null\n          |)\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into car (model, id, top_speed, vin, color, created)\n          |values ('McLaren Senna', 1, 208, 123, 'Red', timestamp '2024-11-24 22:17:30')\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into car (model, id, top_speed, vin, color, created)\n          |values ('Ferrari F8 Tributo', 2, 212, 124, 'Green', timestamp '2024-11-24 22:17:31')\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into car (model, id, top_speed, vin, color, created)\n          |values ('Aston Martin Superleggera', 3, 211, null, 'Blue', timestamp '2024-11-24 22:17:32')\"\"\".stripMargin\n        )\n        try stmt.execute(\"drop table person\")\n        catch case _ => ()\n        stmt.execute(\n          \"\"\"create table person (\n          |    id number primary key,\n          |    first_name varchar2(50),\n          |    last_name varchar2(50) not null,\n          |    is_admin varchar2(1) not null,\n          |    created timestamp not null,\n          |    social_id varchar2(36)\n          |)\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into person (id, first_name, last_name, is_admin, created, social_id) values\n          |(1, 'George', 'Washington', 'Y', current_timestamp, 'd06443a6-3efb-46c4-a66a-a80a8a9a5388')\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into person (id, first_name, last_name, is_admin, created, social_id) values\n          |(2, 'Alexander', 'Hamilton', 'Y', current_timestamp, '529b6c6d-7228-4da5-81d7-13b706f78ddb')\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into person (id, first_name, last_name, is_admin, created, social_id) values\n          |(3, 'John', 'Adams', 'Y', current_timestamp, null)\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into person (id, first_name, last_name, is_admin, created, social_id) values\n          |(4, 'Benjamin', 'Franklin', 'Y', current_timestamp, null)\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into person (id, first_name, last_name, is_admin, created, social_id) values\n          |(5, 'John', 'Jay', 'Y', current_timestamp, null)\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into person (id, first_name, last_name, is_admin, created, social_id) values\n          |(6, 'Thomas', 'Jefferson', 'Y', current_timestamp, null)\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into person (id, first_name, last_name, is_admin, created, social_id) values\n          |(7, 'James', 'Madison', 'Y', current_timestamp, null)\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into person (id, first_name, last_name, is_admin, created, social_id) values\n          |(8, null, 'Nagro', 'N', current_timestamp, null)\"\"\".stripMargin\n        )\n        try stmt.execute(\"drop table my_user\")\n        catch case _ => ()\n        stmt.execute(\n          \"\"\"create table my_user (\n            |  first_name varchar2(200) not null,\n            |  id number generated always as identity,\n            |  primary key (id)\n            |)\n            |\"\"\".stripMargin\n        )\n        stmt.execute(\"\"\"insert into my_user (first_name) values ('George')\"\"\")\n        stmt.execute(\n          \"\"\"insert into my_user (first_name) values ('Alexander')\"\"\"\n        )\n        stmt.execute(\"\"\"insert into my_user (first_name) values ('John')\"\"\")\n        try stmt.execute(\"drop table no_id\")\n        catch case _ => ()\n        stmt.execute(\n          \"\"\"create table no_id (\n            |  created_at timestamp not null,\n            |  user_name varchar2(200) not null,\n            |  user_action varchar2(200) not null\n            |)\n            |\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into no_id (created_at, user_name, user_action) values\n            |(timestamp '1997-08-15 00:00:00', 'Josh', 'clicked a button')\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into no_id (created_at, user_name, user_action) values\n            |(timestamp '1997-08-16 00:00:00', 'Danny', 'opened a toaster')\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"\"\"insert into no_id (created_at, user_name, user_action) values\n            |(timestamp '1997-08-17 00:00:00', 'Greg', 'ran some QA tests')\"\"\".stripMargin\n        )\n        try stmt.execute(\"drop table big_dec\")\n        catch case _ => ()\n        stmt.execute(\n          \"\"\"create table big_dec (\n            |  id number primary key,\n            |  my_big_dec numeric\n            |)\"\"\".stripMargin\n        )\n        stmt.execute(\"insert into big_dec (id, my_big_dec) values (1, 123)\")\n        stmt.execute(\"insert into big_dec (id, my_big_dec) values (2, null)\")\n        try stmt.execute(\"drop table my_time\")\n        catch case _ => ()\n        stmt.execute(\n          \"\"\"create table my_time (\n            |  a timestamp with local time zone not null,\n            |  b date not null,\n            |  c VARCHAR2(100) not null,\n            |  d timestamp not null\n            |)\n            |\"\"\".stripMargin\n        )\n        stmt.execute(\n          \"insert into my_time values (timestamp '2025-03-30 21:19:23 -00:00', date '2025-03-30', '05:20:04', timestamp '2025-04-02 20:16:38')\"\n        )\n        stmt.execute(\n          \"insert into my_time values (timestamp '2025-03-31 21:19:23 -00:00', date '2025-03-31', '05:30:04', timestamp '2025-04-02 20:17:38')\"\n        )\n      )\n      .get\n    Transactor(ds)\n  end xa\nend OracleTests\n"
  },
  {
    "path": "magnum/src/test/scala/PgTests.scala",
    "content": "import com.augustnagro.magnum.*\nimport com.dimafeng.testcontainers.PostgreSQLContainer\nimport com.dimafeng.testcontainers.munit.fixtures.TestContainersFixtures\nimport munit.{AnyFixture, FunSuite, Location}\nimport org.postgresql.ds.PGSimpleDataSource\nimport org.testcontainers.utility.DockerImageName\nimport shared.*\n\nimport java.nio.file.{Files, Path}\nimport scala.util.Using\nimport scala.util.Using.Manager\n\nclass PgTests extends FunSuite, TestContainersFixtures:\n\n  sharedTests(this, PostgresDbType, xa)\n\n  val pgContainer = ForAllContainerFixture(\n    PostgreSQLContainer\n      .Def(dockerImageName = DockerImageName.parse(\"postgres:17.0\"))\n      .createContainer()\n  )\n\n  override def munitFixtures: Seq[AnyFixture[_]] =\n    super.munitFixtures :+ pgContainer\n\n  def xa(): Transactor =\n    val ds = PGSimpleDataSource()\n    val pg = pgContainer()\n    ds.setUrl(pg.jdbcUrl)\n    ds.setUser(pg.username)\n    ds.setPassword(pg.password)\n    val tableDDLs = Vector(\n      \"/pg/car.sql\",\n      \"/pg/person.sql\",\n      \"/pg/my-user.sql\",\n      \"/pg/no-id.sql\",\n      \"/pg/big-dec.sql\",\n      \"/pg/my-time.sql\"\n    ).map(p => Files.readString(Path.of(getClass.getResource(p).toURI)))\n\n    Manager(use =>\n      val con = use(ds.getConnection)\n      val stmt = use(con.createStatement)\n      for ddl <- tableDDLs do stmt.execute(ddl)\n    ).get\n    Transactor(ds)\n  end xa\nend PgTests\n"
  },
  {
    "path": "magnum/src/test/scala/SqliteTests.scala",
    "content": "import com.augustnagro.magnum.*\nimport com.augustnagro.magnum.UUIDCodec.VarCharUUIDCodec\nimport munit.FunSuite\nimport org.sqlite.SQLiteDataSource\nimport shared.*\n\nimport java.nio.file.Files\nimport java.time.{LocalDate, LocalDateTime, LocalTime, OffsetDateTime}\nimport java.util.UUID\nimport scala.util.Using\nimport scala.util.Using.Manager\n\nclass SqliteTests extends FunSuite:\n\n  given DbCodec[OffsetDateTime] =\n    DbCodec[String].biMap(OffsetDateTime.parse, _.toString)\n\n  given DbCodec[LocalDate] =\n    DbCodec[String].biMap(LocalDate.parse, _.toString)\n\n  given DbCodec[UUID] =\n    DbCodec[String].biMap(UUID.fromString, _.toString)\n\n  given DbCodec[Boolean] =\n    DbCodec[Int].biMap(_ != 0, b => if b then 1 else 0)\n\n  given DbCodec[BigDecimal] =\n    DbCodec[String].biMap(BigDecimal.apply, _.toString())\n\n  given DbCodec[LocalTime] =\n    DbCodec[String].biMap(LocalTime.parse, _.toString)\n\n  given DbCodec[LocalDateTime] =\n    DbCodec[String].biMap(LocalDateTime.parse, _.toString)\n\n  sharedTests(this, SqliteDbType, xa)\n\n  lazy val sqliteDbPath = Files.createTempFile(null, \".db\").toAbsolutePath\n\n  def xa(): Transactor =\n    val ds = SQLiteDataSource()\n    ds.setUrl(\"jdbc:sqlite:\" + sqliteDbPath)\n    Manager(use =>\n      val con = use(ds.getConnection)\n      val stmt = use(con.createStatement)\n      stmt.execute(\"drop table if exists car\")\n      stmt.execute(\n        \"\"\"create table car (\n            |    model text not null,\n            |    id integer primary key,\n            |    top_speed integer not null,\n            |    vin integer,\n            |    color text check (color in ('Red', 'Green', 'Blue')) not null,\n            |    created text not null\n            |)\"\"\".stripMargin\n      )\n      stmt.execute(\n        \"\"\"insert into car (model, id, top_speed, vin, color, created) values\n            |('McLaren Senna', 1, 208, 123, 'Red', '2024-11-24T22:17:30.000000000Z'),\n            |('Ferrari F8 Tributo', 2, 212, 124, 'Green', '2024-11-24T22:17:31.000000000Z'),\n            |('Aston Martin Superleggera', 3, 211, null, 'Blue', '2024-11-24T22:17:32.000000000Z')\"\"\".stripMargin\n      )\n      stmt.execute(\"drop table if exists person\")\n      stmt.execute(\n        \"\"\"create table person (\n            |    id integer primary key,\n            |    first_name text,\n            |    last_name text not null,\n            |    is_admin integer not null,\n            |    created text not null,\n            |    social_id varchar(36)\n            |)\"\"\".stripMargin\n      )\n      stmt.execute(\n        \"\"\"insert into person (id, first_name, last_name, is_admin, created, social_id) values\n            |(1, 'George', 'Washington', true, '2024-11-24T22:17:30.000000000Z', 'd06443a6-3efb-46c4-a66a-a80a8a9a5388'),\n            |(2, 'Alexander', 'Hamilton', true, '2024-11-24T22:17:30.000000000Z', '529b6c6d-7228-4da5-81d7-13b706f78ddb'),\n            |(3, 'John', 'Adams', true, '2024-11-24T22:17:30.000000000Z', null),\n            |(4, 'Benjamin', 'Franklin', true, '2024-11-24T22:17:30.000000000Z', null),\n            |(5, 'John', 'Jay', true, '2024-11-24T22:17:30.000000000Z', null),\n            |(6, 'Thomas', 'Jefferson', true, '2024-11-24T22:17:30.000000000Z', null),\n            |(7, 'James', 'Madison', true, '2024-11-24T22:17:30.000000000Z', null),\n            |(8, null, 'Nagro', false, '2024-11-24T22:17:30.000000000Z', null)\"\"\".stripMargin\n      )\n      stmt.execute(\"drop table if exists my_user\")\n      stmt.execute(\n        \"\"\"create table my_user (\n          |  first_name text not null,\n          |  id integer primary key\n          |)\"\"\".stripMargin\n      )\n      stmt.execute(\n        \"\"\"insert into my_user (first_name) values\n          |('George'),\n          |('Alexander'),\n          |('John')\"\"\".stripMargin\n      )\n      stmt.execute(\"drop table if exists no_id\")\n      stmt.execute(\n        \"\"\"create table no_id (\n          |  created_at text not null,\n          |  user_name text not null,\n          |  user_action text not null\n          |)\"\"\".stripMargin\n      )\n      stmt.execute(\n        \"\"\"insert into no_id values\n          |('2024-11-24T22:17:30.000000000Z', 'Josh', 'clicked a button'),\n          |('2024-11-24T22:17:30.000000000Z', 'Danny', 'opened a toaster'),\n          |('2024-11-24T22:17:30.000000000Z', 'Greg', 'ran some QA tests');\"\"\".stripMargin\n      )\n      stmt.execute(\"drop table if exists big_dec\")\n      stmt.execute(\n        \"\"\"create table big_dec (\n          |  id integer primary key,\n          |  my_big_dec text\n          |)\"\"\".stripMargin\n      )\n      stmt.execute(\n        \"\"\"insert into big_dec values\n          |(1, '123'),\n          |(2, null)\"\"\".stripMargin\n      )\n      stmt.execute(\"drop table if exists my_time\")\n      stmt.execute(\n        \"\"\"create table my_time (\n          |  a text not null,\n          |  b text not null,\n          |  c text not null,\n          |  d text not null\n          |)\"\"\".stripMargin\n      )\n      stmt.execute(\n        \"\"\"insert into my_time values\n          |('2025-03-30T21:19:23Z', '2025-03-30', '05:20:04', '2025-04-02T20:16:38'),\n          |('2025-03-31T21:19:23Z', '2025-03-31', '05:30:04', '2025-04-02T20:17:38')\"\"\".stripMargin\n      )\n    ).get\n    Transactor(ds)\n  end xa\nend SqliteTests\n"
  },
  {
    "path": "magnum/src/test/scala/opaques.scala",
    "content": "import com.augustnagro.magnum.*\n\nobject opaques:\n  opaque type Age = Int\n  object Age:\n    def apply(value: Int): Age = value\n    extension (opaque: Age) def value: Int = opaque\n\n    given DbCodec[opaques.Age] =\n      DbCodec.IntCodec.biMap(opaques.Age(_), _.value)\n"
  },
  {
    "path": "magnum/src/test/scala/shared/BigDecTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport munit.{FunSuite, Location}\n\ndef bigDecTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(using\n    Location,\n    DbCodec[BigDecimal]\n): Unit =\n  import suite.*\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class BigDec(id: Int, myBigDec: Option[BigDecimal]) derives DbCodec\n\n  val bigDecRepo = Repo[BigDec, BigDec, Int]\n\n  test(\"option of bigdecimal\"):\n    xa().transact:\n      val bigDec1 = bigDecRepo.findById(1).get\n      assert(bigDec1.myBigDec == Some(BigDecimal(123)))\n      val bigDec2 = bigDecRepo.findById(2).get\n      assert(bigDec2.myBigDec == None)\n"
  },
  {
    "path": "magnum/src/test/scala/shared/Color.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.DbCodec\n\nenum Color derives DbCodec:\n  case Red, Green, Blue\n"
  },
  {
    "path": "magnum/src/test/scala/shared/DateTimeTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport munit.{FunSuite, Location}\n\nimport java.sql.Connection\nimport java.time.{\n  LocalDate,\n  LocalDateTime,\n  LocalTime,\n  OffsetDateTime,\n  ZoneOffset\n}\nimport scala.util.Using\n\ndef dateTimeTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(using\n    Location,\n    DbCodec[OffsetDateTime],\n    DbCodec[LocalDate],\n    DbCodec[LocalTime]\n): Unit =\n  import suite.*\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class MyTime(\n      a: OffsetDateTime,\n      b: LocalDate,\n      c: LocalTime,\n      d: LocalDateTime\n  ) derives DbCodec\n\n  val myTimeRepo = Repo[MyTime, MyTime, Null]\n\n  val all = Vector(\n    MyTime(\n      a = OffsetDateTime.parse(\"2025-03-30T21:19:23Z\"),\n      b = LocalDate.parse(\"2025-03-30\"),\n      c = LocalTime.parse(\"05:20:04\"),\n      d = LocalDateTime.parse(\"2025-04-02T20:16:38\")\n    ),\n    MyTime(\n      a = OffsetDateTime.parse(\"2025-03-31T21:19:23Z\"),\n      b = LocalDate.parse(\"2025-03-31\"),\n      c = LocalTime.parse(\"05:30:04\"),\n      d = LocalDateTime.parse(\"2025-04-02T20:17:38\")\n    )\n  )\n\n  test(\"can read all JDBC 4.2 time types\"):\n    // https://jcp.org/aboutJava/communityprocess/maintenance/jsr221/JDBC4.2MR-Oct232013.pdf (table B-4)\n    // https://jdbc.postgresql.org/documentation/query/\n    xa().connect:\n      assertEquals(myTimeRepo.findAll, all)\n\n  test(\"can write all JDBC 4.2 time types\"):\n    xa().connect:\n      val newTime = MyTime(\n        a = OffsetDateTime.parse(\"2025-04-05T21:18:23Z\"),\n        b = LocalDate.parse(\"2025-04-01\"),\n        c = LocalTime.parse(\"05:20:04\"),\n        d = LocalDateTime.parse(\"2025-04-02T20:17:38\")\n      )\n      myTimeRepo.insert(newTime)\n      val res = sql\"SELECT * FROM my_time ORDER BY a\".query[MyTime].run()\n      assertEquals(all :+ newTime, res)\n\nend dateTimeTests\n"
  },
  {
    "path": "magnum/src/test/scala/shared/EmbeddedFragTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport munit.{FunSuite, Location}\n\nimport java.util.UUID\n\ndef embeddedFragTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(\n    using Location\n): Unit =\n  import suite.*\n\n  test(\"embed Frag into Frag\"):\n    def findPersonCnt(filter: Frag)(using DbCon): Int =\n      val x = sql\"id != ${util.Random.nextInt(20) + 20}\"\n      sql\"SELECT count(*) FROM person WHERE $filter AND $x\"\n        .query[Int]\n        .run()\n        .head\n    val isAdminFrag =\n      if dbType == OracleDbType then sql\"is_admin = 'Y'\"\n      else sql\"is_admin = true\"\n    xa().connect:\n      val johnCnt =\n        findPersonCnt(sql\"$isAdminFrag AND first_name = 'John'\")\n      assert(johnCnt == 2)\n\n  test(\"embedded frag param exprs should be evaluated only once\"):\n    object Holder:\n      var uuid: UUID = _\n      def set(uuid: UUID): UUID =\n        this.uuid = uuid\n        uuid\n    val frag =\n      sql\"select * from person where ${sql\"social_id = ${Holder.set(UUID.randomUUID)}\"}\"\n    assert(frag.params.size == 1)\n    assert(frag.params.head == Holder.uuid)\nend embeddedFragTests\n"
  },
  {
    "path": "magnum/src/test/scala/shared/EntityCreatorTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport com.augustnagro.magnum.SqlException\nimport munit.{FunSuite, Location}\n\nimport scala.util.Using\n\ndef entityCreatorTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(\n    using Location\n): Unit =\n  import suite.*\n  if dbType == ClickhouseDbType then return\n\n  case class MyUserCreator(firstName: String) derives DbCodec\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class MyUser(firstName: String, id: Long) derives DbCodec\n\n  val userRepo = Repo[MyUserCreator, MyUser, Long]\n  val user = TableInfo[MyUserCreator, MyUser, Long]\n\n  test(\"insert EntityCreator\"):\n    xa().connect:\n      userRepo.insert(MyUserCreator(\"Ash\"))\n      userRepo.insert(MyUserCreator(\"Steve\"))\n      assert(userRepo.count == 5L)\n      assert(userRepo.findAll.map(_.firstName).contains(\"Steve\"))\n\n  test(\"insertReturning EntityCreator\"):\n    assume(dbType != MySqlDbType)\n    assume(dbType != SqliteDbType)\n    xa().connect:\n      val user = userRepo.insertReturning(MyUserCreator(\"Ash\"))\n      assert(user.firstName == \"Ash\")\n\n  test(\"insertAllReturning EntityCreator\"):\n    assume(dbType != MySqlDbType)\n    assume(dbType != SqliteDbType)\n    xa().connect:\n      val newUsers = Vector(\n        MyUserCreator(\"Ash\"),\n        MyUserCreator(\"Steve\"),\n        MyUserCreator(\"Josh\")\n      )\n      val users = userRepo.insertAllReturning(newUsers)\n      assert(userRepo.count == 6L)\n      assert(users.size == 3)\n      assert(users.last.firstName == newUsers.last.firstName)\n\n  test(\"insert invalid EntityCreator\"):\n    intercept[SqlException]:\n      xa().connect:\n        val invalidUser = MyUserCreator(null)\n        userRepo.insert(invalidUser)\n\n  test(\"insertAll EntityCreator\"):\n    xa().connect:\n      val newUsers = Vector(\n        MyUserCreator(\"Ash\"),\n        MyUserCreator(\"Steve\"),\n        MyUserCreator(\"Josh\")\n      )\n      userRepo.insertAll(newUsers)\n      assert(userRepo.count == 6L)\n      assert(\n        userRepo.findAll.map(_.firstName).contains(newUsers.last.firstName)\n      )\n\n  test(\"custom insert EntityCreator\"):\n    xa().connect:\n      val u = MyUserCreator(\"Ash\")\n      val update =\n        sql\"insert into $user ${user.insertColumns} values ($u)\".update\n      assertNoDiff(\n        update.frag.sqlString,\n        \"insert into my_user (first_name) values (?)\"\n      )\n      val rowsInserted = update.run()\n      assert(rowsInserted == 1)\n      assert(userRepo.count == 4L)\n      assert(userRepo.findAll.exists(_.firstName == \"Ash\"))\n\n  test(\"custom update EntityCreator\"):\n    xa().connect:\n      val u = userRepo.findAll.head\n      val newName = \"Ash\"\n      val update =\n        sql\"update $user set ${user.firstName} = $newName where ${user.id} = ${u.id}\".update\n      assertNoDiff(\n        update.frag.sqlString,\n        \"update my_user set first_name = ? where id = ?\"\n      )\n      val rowsUpdated = update.run()\n      assert(rowsUpdated == 1)\n      assert(userRepo.findAll.exists(_.firstName == \"Ash\"))\n\n  test(\".returning iterator\"):\n    assume(dbType != MySqlDbType)\n    assume(dbType != SqliteDbType)\n    xa().connect:\n      Using.Manager(implicit use =>\n        val it =\n          if dbType == H2DbType then\n            sql\"INSERT INTO $user ${user.insertColumns} VALUES ('Bob')\"\n              .returningKeys[Long](user.id)\n              .iterator()\n          else\n            sql\"INSERT INTO $user ${user.insertColumns} VALUES ('Bob') RETURNING ${user.id}\"\n              .returning[Long]\n              .iterator()\n        assert(it.size == 1)\n      )\n\nend entityCreatorTests\n"
  },
  {
    "path": "magnum/src/test/scala/shared/ImmutableRepoTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport munit.{FunSuite, Location}\n\nimport java.sql.{Connection, PreparedStatement, ResultSet}\nimport java.time.{OffsetDateTime, ZoneOffset}\nimport scala.util.Using\n\ndef immutableRepoTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(\n    using\n    Location,\n    DbCodec[OffsetDateTime]\n): Unit =\n  import suite.*\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class Car(\n      model: String,\n      @Id id: Long,\n      topSpeed: Int,\n      @SqlName(\"vin\") vinNumber: Option[Int],\n      color: Color,\n      created: OffsetDateTime\n  ) derives DbCodec\n\n  val carRepo = ImmutableRepo[Car, Long]\n  val car = TableInfo[Car, Car, Long]\n\n  val allCars = Vector(\n    Car(\n      model = \"McLaren Senna\",\n      id = 1L,\n      topSpeed = 208,\n      vinNumber = Some(123),\n      color = Color.Red,\n      created = OffsetDateTime.parse(\"2024-11-24T22:17:30.000000000Z\")\n    ),\n    Car(\n      model = \"Ferrari F8 Tributo\",\n      id = 2L,\n      topSpeed = 212,\n      vinNumber = Some(124),\n      color = Color.Green,\n      created = OffsetDateTime.parse(\"2024-11-24T22:17:31.000000000Z\")\n    ),\n    Car(\n      model = \"Aston Martin Superleggera\",\n      id = 3L,\n      topSpeed = 211,\n      vinNumber = None,\n      color = Color.Blue,\n      created = OffsetDateTime.parse(\"2024-11-24T22:17:32.000000000Z\")\n    )\n  )\n\n  test(\"count\"):\n    xa().connect:\n      assert(carRepo.count == 3L)\n\n  test(\"existsById\"):\n    xa().connect:\n      assert(carRepo.existsById(3L))\n      assert(!carRepo.existsById(4L))\n\n  test(\"findAll\"):\n    val cars = xa().connect:\n      carRepo.findAll\n    assert(cars == allCars)\n\n  test(\"findById\"):\n    xa().connect:\n      assert(carRepo.findById(3L).get == allCars.last)\n      assert(carRepo.findById(4L) == None)\n\n  test(\"findAllByIds\"):\n    assume(dbType != ClickhouseDbType)\n    assume(dbType != 
MySqlDbType)\n    assume(dbType != OracleDbType)\n    assume(dbType != SqliteDbType)\n    xa().connect:\n      val ids = carRepo.findAllById(Vector(1L, 3L)).map(_.id)\n      assert(ids == Vector(1L, 3L))\n\n  test(\"serializable transaction\"):\n    xa()\n      .withConnectionConfig(withSerializable)\n      .transact:\n        assert(carRepo.count == 3L)\n\n  def withSerializable(con: Connection): Unit =\n    con.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE)\n\n  test(\"select query\"):\n    xa().connect:\n      val minSpeed: Int = 210\n      val query =\n        sql\"select ${car.all} from $car where ${car.topSpeed} > $minSpeed\"\n          .query[Car]\n      assertNoDiff(\n        query.frag.sqlString,\n        \"select model, id, top_speed, vin, color, created from car where top_speed > ?\"\n      )\n      assert(query.frag.params == Vector(minSpeed))\n      assert(query.run() == allCars.tail)\n\n  test(\"select query with aliasing\"):\n    xa().connect:\n      val minSpeed = 210\n      val cAlias = car.alias(\"c\")\n      val query =\n        sql\"select ${cAlias.all} from $cAlias where ${cAlias.topSpeed} > $minSpeed\"\n          .query[Car]\n      assertNoDiff(\n        query.frag.sqlString,\n        \"select c.model, c.id, c.top_speed, c.vin, c.color, c.created from car c where c.top_speed > ?\"\n      )\n      assert(query.frag.params == Vector(minSpeed))\n      assert(query.run() == allCars.tail)\n\n  test(\"select via option\"):\n    xa().connect:\n      val vin = Option(124)\n      val cars =\n        sql\"select * from car where vin = $vin\"\n          .query[Car]\n          .run()\n      assert(cars == allCars.filter(_.vinNumber == vin))\n\n  test(\"tuple select\"):\n    xa().connect:\n      val tuples = sql\"select model, color from car where id = 2\"\n        .query[(String, Color)]\n        .run()\n      assert(tuples == Vector(allCars(1).model -> allCars(1).color))\n\n  test(\"reads null int as None and not Some(0)\"):\n    
xa().connect:\n      assert(carRepo.findById(3L).get.vinNumber == None)\n\n  test(\"created timestamps should match\"):\n    xa().connect:\n      assert(carRepo.findAll.map(_.created) == allCars.map(_.created))\n\n  test(\".query iterator\"):\n    xa().connect:\n      Using.Manager(implicit use =>\n        val it = sql\"SELECT * FROM car\".query[Car].iterator()\n        assert(it.map(_.id).size == 3)\n      )\n\n  test(\"sql interpolator selects right DbCodec\"):\n    case class Coord(x: Double, y: Double)\n\n    given DbCodec[Coord] with\n      def cols: IArray[Int] = IArray(java.sql.Types.BINARY)\n      def queryRepr: String = \"MyCoord(?)\"\n      def readSingle(rs: ResultSet, pos: Int): Coord = ???\n      def readSingleOption(rs: ResultSet, pos: Int): Option[Coord] = ???\n      def writeSingle(coord: Coord, ps: PreparedStatement, pos: Int): Unit = ???\n\n    val myCoord = Coord(1, 2)\n\n    val query = sql\"SELECT * FROM test WHERE coord = $myCoord\"\n\n    assert(query.sqlString.contains(\"MyCoord(?)\"))\n\nend immutableRepoTests\n"
  },
  {
    "path": "magnum/src/test/scala/shared/MultilineFragTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport munit.{FunSuite, Location}\n\ndef multilineFragTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(\n    using Location\n): Unit =\n  import suite.*\n\n  def testSelectPersonFrag(frag: Frag): Unit =\n    xa().connect:\n      val res = frag.query[Int].run()\n      assert(res.size == 1)\n      assert(frag.sqlString == \"SELECT count(*)\\nFROM person\\nWHERE id = ?\")\n      assert(frag.params.size == 1)\n\n  def personId = util.Random.nextInt(8) + 1\n\n  test(\"multiline Frag\"):\n    testSelectPersonFrag(\n      frag = sql\"\"\"SELECT count(*)\nFROM person\nWHERE id = $personId\"\"\"\n    )\n\n  test(\"multiline Frag with stripMargin\"):\n    testSelectPersonFrag(\n      frag = sql\"\"\"SELECT count(*)\n                  |FROM person\n                  |WHERE id = $personId\"\"\".stripMargin\n    )\n\n  test(\"multiline Frag with stripMargin('*')\"):\n    testSelectPersonFrag(\n      frag = sql\"\"\"SELECT count(*)\n                  *FROM person\n                  *WHERE id = $personId\"\"\".stripMargin('*')\n    )\n\nend multilineFragTests\n"
  },
  {
    "path": "magnum/src/test/scala/shared/NoIdTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport munit.{FunSuite, Location}\n\nimport java.time.OffsetDateTime\n\ndef noIdTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(using\n    Location,\n    DbCodec[OffsetDateTime]\n): Unit =\n  import suite.*\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class NoId(\n      createdAt: OffsetDateTime,\n      userName: String,\n      userAction: String\n  ) derives DbCodec\n\n  val noIdRepo = Repo[NoId, NoId, Null]()\n\n  val noIdTableInfo = TableInfo[NoId, NoId, Null]\n\n  test(\"insert NoId entities\"):\n    xa().connect:\n      val entity = NoId(OffsetDateTime.now, \"Dan\", \"Fishing\")\n      noIdRepo.insert(entity)\n      assert(noIdRepo.findAll.exists(_.userName == \"Dan\"))\n\n  test(\"select NoId usernames via TableInfo\"):\n    xa().connect:\n      val userNames = sql\"SELECT ${noIdTableInfo.userName} FROM $noIdTableInfo\"\n        .query[String]\n        .run()\n      assert(userNames.size == 3)\n\nend noIdTests\n"
  },
  {
    "path": "magnum/src/test/scala/shared/OptionalProductTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport munit.{FunSuite, Location}\n\nimport java.time.OffsetDateTime\n\ndef optionalProductTests(\n    suite: FunSuite,\n    dbType: DbType,\n    xa: () => Transactor\n)(using Location, DbCodec[BigDecimal], DbCodec[OffsetDateTime]): Unit =\n  import suite.*\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class Car(\n      model: String,\n      @Id id: Long,\n      topSpeed: Int,\n      @SqlName(\"vin\") vinNumber: Option[Int],\n      color: Color,\n      created: OffsetDateTime\n  ) derives DbCodec\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class BigDec(id: Int, myBigDec: Option[BigDecimal]) derives DbCodec\n\n  test(\"left join with optional product type\"):\n    assume(dbType != ClickhouseDbType)\n    xa().connect:\n      val res = sql\"select * from car c left join big_dec bd on bd.id = c.id\"\n        .query[(Car, Option[BigDec])]\n        .run()\n      assert(res.exists((_, bigDec) => bigDec.isEmpty))\nend optionalProductTests\n"
  },
  {
    "path": "magnum/src/test/scala/shared/RepoTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport com.augustnagro.magnum.{BatchUpdateResult, SqlException, ColumnNames}\nimport munit.FunSuite\n\nimport java.time.OffsetDateTime\nimport java.util.UUID\n\ndef repoTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(using\n    munit.Location,\n    DbCodec[UUID],\n    DbCodec[Boolean],\n    DbCodec[OffsetDateTime]\n): Unit =\n  import suite.*\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class Person(\n      id: Long,\n      firstName: Option[String],\n      lastName: String,\n      isAdmin: Boolean,\n      created: OffsetDateTime,\n      socialId: Option[UUID]\n  ) derives DbCodec\n\n  val personRepo = Repo[Person, Person, Long]\n  val person = TableInfo[Person, Person, Long]\n\n  test(\"delete\"):\n    xa().connect:\n      val p = personRepo.findById(1L).get\n      personRepo.delete(p)\n      assert(personRepo.findById(1L) == None)\n\n  test(\"delete invalid\"):\n    xa().connect:\n      personRepo.delete(\n        Person(999L, None, \"\", false, OffsetDateTime.now, None)\n      )\n      assert(8L == personRepo.count)\n\n  test(\"deleteById\"):\n    xa().connect:\n      personRepo.deleteById(1L)\n      personRepo.deleteById(2L)\n      personRepo.deleteById(1L)\n      assert(personRepo.findAll.size == 6)\n\n  test(\"deleteAll\"):\n    xa().connect:\n      val p1 = personRepo.findById(1L).get\n      val p2 = p1.copy(id = 2L)\n      val p3 = p1.copy(id = 999L)\n      val expectedRowsUpdate = dbType match\n        case ClickhouseDbType => 3\n        case _                => 2\n      val res = personRepo.deleteAll(Vector(p1, p2, p3))\n      assert(res == BatchUpdateResult.Success(expectedRowsUpdate))\n      assert(6L == personRepo.count)\n\n  test(\"deleteAllById\"):\n    xa().connect:\n      val expectedRowsUpdate = dbType match\n        case ClickhouseDbType => 3\n        case _                => 2\n      val res = personRepo.deleteAllById(Vector(1L, 2L, 1L))\n      
assert(res == BatchUpdateResult.Success(expectedRowsUpdate))\n      assert(6L == personRepo.count)\n\n  test(\"truncate\"):\n    xa().connect:\n      personRepo.truncate()\n      assert(personRepo.count == 0L)\n\n  test(\"insert\"):\n    xa().connect:\n      personRepo.insert(\n        Person(\n          id = 9L,\n          firstName = Some(\"John\"),\n          lastName = \"Smith\",\n          isAdmin = false,\n          socialId = Some(UUID.randomUUID),\n          created = OffsetDateTime.now\n        )\n      )\n      personRepo.insert(\n        Person(\n          id = 10L,\n          firstName = None,\n          lastName = \"Prince\",\n          isAdmin = true,\n          socialId = None,\n          created = OffsetDateTime.now\n        )\n      )\n      assert(personRepo.count == 10L)\n      assert(personRepo.findAll.map(_.lastName).contains(\"Smith\"))\n\n  test(\"insertReturning\"):\n    assume(dbType != MySqlDbType)\n    assume(dbType != SqliteDbType)\n    xa().connect:\n      val person = personRepo.insertReturning(\n        Person(\n          id = 9L,\n          firstName = Some(\"John\"),\n          lastName = \"Smith\",\n          isAdmin = false,\n          socialId = None,\n          created = OffsetDateTime.now\n        )\n      )\n      assert(person.lastName == \"Smith\")\n\n  test(\"insertAllReturning\"):\n    assume(dbType != MySqlDbType)\n    assume(dbType != SqliteDbType)\n    xa().connect:\n      val newPc = Vector(\n        Person(\n          id = 9L,\n          firstName = Some(\"Chandler\"),\n          lastName = \"Johnsored\",\n          isAdmin = true,\n          socialId = Some(UUID.randomUUID()),\n          created = OffsetDateTime.now\n        ),\n        Person(\n          id = 10L,\n          firstName = None,\n          lastName = \"Odysseus\",\n          isAdmin = false,\n          socialId = None,\n          created = OffsetDateTime.now\n        ),\n        Person(\n          id = 11L,\n          firstName = Some(\"Jorge\"),\n     
     lastName = \"Masvidal\",\n          isAdmin = true,\n          socialId = None,\n          created = OffsetDateTime.now\n        )\n      )\n      val people = personRepo.insertAllReturning(newPc)\n      assert(personRepo.count == 11L)\n      assert(people.size == 3)\n      assert(people.last.lastName == newPc.last.lastName)\n\n  test(\"insert invalid\"):\n    intercept[SqlException]:\n      xa().connect:\n        val invalidP =\n          Person(9L, None, null, false, OffsetDateTime.now, None)\n        personRepo.insert(invalidP)\n\n  test(\"update\"):\n    assume(dbType != ClickhouseDbType)\n    xa().connect:\n      val p = personRepo.findById(1L).get\n      val updated = p.copy(firstName = None, isAdmin = false)\n      personRepo.update(updated)\n      assert(personRepo.findById(1L).get == updated)\n\n  test(\"update invalid\"):\n    assume(dbType != ClickhouseDbType)\n    intercept[SqlException]:\n      xa().connect:\n        val p = personRepo.findById(1L).get\n        val updated = p.copy(lastName = null)\n        personRepo.update(updated)\n\n  test(\"insertAll\"):\n    xa().connect:\n      val newPeople = Vector(\n        Person(\n          id = 9L,\n          firstName = Some(\"Chandler\"),\n          lastName = \"Johnsored\",\n          isAdmin = true,\n          socialId = Some(UUID.randomUUID()),\n          created = OffsetDateTime.now\n        ),\n        Person(\n          id = 10L,\n          firstName = None,\n          lastName = \"Odysseus\",\n          isAdmin = false,\n          socialId = None,\n          created = OffsetDateTime.now\n        ),\n        Person(\n          id = 11L,\n          firstName = Some(\"Jorge\"),\n          lastName = \"Masvidal\",\n          isAdmin = true,\n          socialId = None,\n          created = OffsetDateTime.now\n        )\n      )\n      personRepo.insertAll(newPeople)\n      assert(personRepo.count == 11L)\n      assert(\n        personRepo.findAll.map(_.lastName).contains(newPeople.last.lastName)\n 
     )\n\n  test(\"updateAll\"):\n    assume(dbType != ClickhouseDbType)\n    xa().connect:\n      val newPeople = Vector(\n        personRepo.findById(1L).get.copy(lastName = \"Peterson\"),\n        personRepo.findById(2L).get.copy(lastName = \"Moreno\")\n      )\n      val res = personRepo.updateAll(newPeople)\n      assert(res == BatchUpdateResult.Success(2))\n      assert(personRepo.findById(1L).get == newPeople(0))\n      assert(personRepo.findById(2L).get == newPeople(1))\n\n  test(\"transact\"):\n    assume(dbType != ClickhouseDbType)\n    val count = xa().transact:\n      val p = Person(\n        id = 9L,\n        firstName = Some(\"Chandler\"),\n        lastName = \"Brown\",\n        isAdmin = false,\n        created = OffsetDateTime.now,\n        socialId = None\n      )\n      personRepo.insert(p)\n      personRepo.count\n    assert(count == 9L)\n\n  test(\"transact failed\"):\n    assume(dbType != ClickhouseDbType)\n    val transactor = xa()\n    val p = Person(\n      id = 9L,\n      firstName = Some(\"Chandler\"),\n      lastName = \"Brown\",\n      isAdmin = false,\n      created = OffsetDateTime.now,\n      socialId = None\n    )\n    try\n      transactor.transact:\n        personRepo.insert(p)\n        throw RuntimeException()\n      fail(\"should not reach\")\n    catch\n      case _: Exception =>\n        transactor.transact:\n          assert(personRepo.count == 8L)\n\n  test(\"custom insert\"):\n    xa().connect:\n      val p = Person(\n        id = 9L,\n        firstName = Some(\"Chandler\"),\n        lastName = \"Brown\",\n        isAdmin = false,\n        socialId = None,\n        created = OffsetDateTime.now\n      )\n      val update =\n        sql\"insert into $person ${person.insertColumns} values ($p)\".update\n      assertNoDiff(\n        update.frag.sqlString,\n        \"insert into person (id, first_name, last_name, is_admin, created, social_id) values (?, ?, ?, ?, ?, ?)\"\n      )\n      val rowsInserted = update.run()\n      
assert(rowsInserted == 1)\n      assert(personRepo.count == 9L)\n      assert(\n        personRepo.findAll.exists(fetched =>\n          fetched.firstName == p.firstName &&\n            fetched.lastName == p.lastName &&\n            fetched.isAdmin == p.isAdmin\n        )\n      )\n\n  test(\"custom update\"):\n    xa().connect:\n      val p = Person(\n        id = 9L,\n        firstName = Some(\"Chandler\"),\n        lastName = \"Brown\",\n        isAdmin = false,\n        socialId = Some(UUID.randomUUID()),\n        created = OffsetDateTime.now\n      )\n      personRepo.insert(p)\n      val newIsAdmin = true\n      val update =\n        sql\"update $person set ${person.isAdmin} = $newIsAdmin where ${person.id} = ${p.id}\".update\n      assertNoDiff(\n        update.frag.sqlString,\n        \"update person set is_admin = ? where id = ?\"\n      )\n      val rowsUpdated = update.run()\n      assert(rowsUpdated == 1)\n      assert(personRepo.findById(p.id).get.isAdmin == true)\n\n  test(\"custom returning a single column\"):\n    assume(dbType != ClickhouseDbType)\n    assume(dbType != MySqlDbType)\n    assume(dbType != SqliteDbType)\n    xa().connect:\n      val personId =\n        if dbType == H2DbType then\n          sql\"\"\"insert into person (id, first_name, last_name, created, is_admin)\n                values (9, 'Arton', 'Senna', now(), true)\n                \"\"\"\n            .returningKeys[Long](\"id\")\n            .run()\n            .head\n        else if dbType == OracleDbType then\n          sql\"\"\"insert into person (id, first_name, last_name, created, is_admin)\n                values (9, 'Arton', 'Senna', current_timestamp, 'Y')\"\"\"\n            .returningKeys[Long](\"id\")\n            .run()\n            .head\n        else\n          sql\"\"\"insert into person (id, first_name, last_name, created, is_admin)\n                values (9, 'Arton', 'Senna', now(), 'Y') RETURNING id\n                \"\"\".returning[Long].run().head\n      
assert(personRepo.findById(personId).get.lastName == \"Senna\")\n\n  test(\"custom returning multiple columns\"):\n    assume(dbType != ClickhouseDbType)\n    assume(dbType != MySqlDbType)\n    assume(dbType != SqliteDbType)\n    assume(dbType != OracleDbType)\n    xa().connect:\n      val cols =\n        if dbType == H2DbType then\n          sql\"\"\"insert into person (id, first_name, last_name, created, is_admin) values\n             (9, 'Arton', 'Senna', now(), true),\n             (10, 'Demo', 'User', now(), false)\n             \"\"\"\n            .returningKeys[(Long, OffsetDateTime)](\n              person.id,\n              person.created\n            )\n            .run()\n        else\n          sql\"\"\"insert into person (id, first_name, last_name, created, is_admin) values\n               (9, 'Arton', 'Senna', now(), true),\n               (10, 'Demo', 'User', now(), false)\n               RETURNING id, created\n               \"\"\".returning[(Long, OffsetDateTime)].run()\n      val newLastNames =\n        cols.map((id, _) => personRepo.findById(id).get.lastName)\n      assert(newLastNames == Vector(\"Senna\", \"User\"))\n\n  test(\"custom returning with no rows updated\"):\n    assume(dbType != ClickhouseDbType)\n    assume(dbType != MySqlDbType)\n    assume(dbType != SqliteDbType)\n    xa().connect:\n      val personIds =\n        if dbType == H2DbType || dbType == OracleDbType then\n          sql\"update person set first_name = 'xxx' where last_name = 'Not Here'\"\n            .returningKeys[Long](ColumnNames(\"id\", IArray(person.id)))\n            .run()\n        else\n          sql\"update person set first_name = 'xxx' where last_name = 'Not Here' returning id\"\n            .returning[Long]\n            .run()\n      assert(personIds.isEmpty)\n\n  test(\"returning non primary key column\"):\n    assume(dbType != ClickhouseDbType)\n    assume(dbType != MySqlDbType)\n    assume(dbType != SqliteDbType)\n    xa().connect:\n      val 
personFirstNames =\n        if dbType == H2DbType || dbType == OracleDbType then\n          sql\"update person set last_name = 'xxx'\"\n            .returningKeys[String](person.firstName)\n            .run()\n        else\n          sql\"update person set last_name = 'xxx' returning first_name\"\n            .returning[String]\n            .run()\n\n      assert(personFirstNames.nonEmpty)\nend repoTests\n"
  },
  {
    "path": "magnum/src/test/scala/shared/SharedTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport munit.{FunSuite, Location}\n\nimport java.time.{LocalTime, OffsetDateTime}\nimport java.util.UUID\n\ndef sharedTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(using\n    Location,\n    DbCodec[UUID],\n    DbCodec[Boolean],\n    DbCodec[OffsetDateTime],\n    DbCodec[BigDecimal],\n    DbCodec[LocalTime]\n): Unit =\n  immutableRepoTests(suite, dbType, xa)\n  repoTests(suite, dbType, xa)\n  entityCreatorTests(suite, dbType, xa)\n  specTests(suite, dbType, xa)\n  sqlNameTests(suite, dbType, xa)\n  noIdTests(suite, dbType, xa)\n  embeddedFragTests(suite, dbType, xa)\n  multilineFragTests(suite, dbType, xa)\n  bigDecTests(suite, dbType, xa)\n  optionalProductTests(suite, dbType, xa)\n  dateTimeTests(suite, dbType, xa)\n  tupleTests(suite, dbType, xa)\n  tableInfoTests(suite, dbType, xa)\nend sharedTests\n"
  },
  {
    "path": "magnum/src/test/scala/shared/SpecTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport munit.{FunSuite, Location}\n\nimport java.time.{OffsetDateTime, ZoneOffset}\n\nopaque type CarId = Long\nobject CarId:\n  def apply(value: Long): CarId = value\n  extension (opaque: CarId) def value: Long = opaque\n  given DbCodec[CarId] =\n    DbCodec.LongCodec.biMap(CarId.apply, _.value)\n\ndef specTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(using\n    Location,\n    DbCodec[OffsetDateTime]\n): Unit =\n  import suite.*\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class Car(\n      model: String,\n      @Id id: Long,\n      topSpeed: Int,\n      @SqlName(\"vin\") vinNumber: Option[Int],\n      color: Color,\n      created: OffsetDateTime\n  ) derives DbCodec\n\n  val carRepo = ImmutableRepo[Car, Long]\n  val car = TableInfo[Car, Car, Long]\n\n  val allCars = Vector(\n    Car(\n      model = \"McLaren Senna\",\n      id = 1L,\n      topSpeed = 208,\n      vinNumber = Some(123),\n      color = Color.Red,\n      created = OffsetDateTime.parse(\"2024-11-24T22:17:30.000000000Z\")\n    ),\n    Car(\n      model = \"Ferrari F8 Tributo\",\n      id = 2L,\n      topSpeed = 212,\n      vinNumber = Some(124),\n      color = Color.Green,\n      created = OffsetDateTime.parse(\"2024-11-24T22:17:31.000000000Z\")\n    ),\n    Car(\n      model = \"Aston Martin Superleggera\",\n      id = 3L,\n      topSpeed = 211,\n      vinNumber = None,\n      color = Color.Blue,\n      created = OffsetDateTime.parse(\"2024-11-24T22:17:32.000000000Z\")\n    )\n  )\n\n  test(\"like\"):\n    xa().transact:\n      val model = \"Ferr%\"\n      val spec = Spec[Car].where(sql\"model LIKE $model\")\n      assert(carRepo.findAll(spec) == Vector(allCars(1)))\n\n  test(\"select all\"):\n    xa().transact:\n      val spec = Spec[Car]\n      assert(carRepo.findAll(spec) == allCars)\n\n  test(\"empty predicate\"):\n    xa().transact:\n      val spec = Spec[Car].where(sql\"\")\n      
assert(carRepo.findAll(spec) == allCars)\n\n  test(\"predicate having param at end\"):\n    xa().transact:\n      val id = CarId(2L)\n      val spec = Spec[Car].where(sql\"$id < id\")\n      assert(carRepo.findAll(spec) == Vector(allCars.last))\n\n  test(\"AND in where predicate\"):\n    xa().transact:\n      val color = Color.Red\n      val model = \"MCLAREN SENNA\"\n      val spec =\n        Spec[Car].where(sql\"color = $color AND $model = upper(model)\")\n      assert(carRepo.findAll(spec) == Vector(allCars.head))\n\n  test(\"multiple where parameters\"):\n    xa().transact:\n      val color = Color.Red\n      val model = \"MCLAREN SENNA\"\n      val spec = Spec[Car]\n        .where(sql\"color = $color\")\n        .where(sql\"$model = upper(model)\")\n      assert(carRepo.findAll(spec) == Vector(allCars.head))\n\n  test(\"orderBy\"):\n    xa().transact:\n      val spec = Spec[Car].orderBy(\"top_speed\")\n      assert(carRepo.findAll(spec) == allCars.sortBy(_.topSpeed))\n\n  test(\"orderBy null with sort order and null order\"):\n    xa().transact:\n      val spec = Spec[Car]\n        .orderBy(\"vin\", SortOrder.Desc, NullOrder.First)\n      assert(carRepo.findAll(spec) == allCars.reverse)\n\n  test(\"limit\"):\n    xa().transact:\n      val spec = Spec[Car].limit(2)\n      assert(carRepo.findAll(spec).size == 2)\n\n  test(\"offset\"):\n    xa().transact:\n      val spec = Spec[Car].offset(1)\n      assert(carRepo.findAll(spec) == allCars.tail)\n\n  test(\"seek\"):\n    xa().transact:\n      val spec = Spec[Car].seek(\"id\", SeekDir.Gt, 2, SortOrder.Asc)\n      assert(carRepo.findAll(spec).size == 1)\n\n  test(\"seek multiple\"):\n    xa().transact:\n      val spec = Spec[Car]\n        .seek(\"id\", SeekDir.Lt, 3, SortOrder.Asc)\n        .seek(\"top_speed\", SeekDir.Gt, 210, SortOrder.Asc)\n      assert(carRepo.findAll(spec) == Vector(allCars(1)))\n\n  test(\"everything\"):\n    xa().transact:\n      val idOpt = Option.empty[CarId]\n      val speed = 210\n      
val spec = Spec[Car]\n        .where(idOpt.map(id => sql\"id = $id\").getOrElse(sql\"\"))\n        .where(sql\"top_speed > $speed\")\n        .orderBy(\"model\", SortOrder.Desc)\n        .limit(1)\n        .seek(\"vin\", SeekDir.Gt, 1, SortOrder.Asc, NullOrder.Last)\n      assert(carRepo.findAll(spec) == Vector(allCars(1)))\n\n  test(\"prefix\"):\n    xa().transact:\n      val c = car.alias(\"c\")\n      val color = Color.Red\n      val spec = Spec[Car]\n        .prefix(sql\"SELECT ${c.all} FROM $c\")\n        .where(sql\"${c.color} = $color\")\n      assert(carRepo.findAll(spec) == Vector(allCars.head))\n\n  test(\"prefix with embedded sql\"):\n    xa().transact:\n      val c = car.alias(\"c\")\n      val color = Color.Red\n      val selectPart = sql\"SELECT ${c.all}\"\n      val fromPart = sql\"FROM $c\"\n      val spec = Spec[Car]\n        .prefix(sql\"$selectPart $fromPart\")\n        .where(sql\"${c.color} = $color\")\n      assert(carRepo.findAll(spec) == Vector(allCars.head))\nend specTests\n"
  },
  {
    "path": "magnum/src/test/scala/shared/SqlNameTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport munit.{FunSuite, Location}\n\nimport java.time.OffsetDateTime\n\ndef sqlNameTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(using\n    Location\n): Unit =\n  import suite.*\n\n  @SqlName(\"car\")\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class CustomCar(\n      model: String,\n      @Id id: Long,\n      topSpeed: Int,\n      @SqlName(\"vin\") vinNumber: Option[Int],\n      color: Color,\n      created: OffsetDateTime\n  ) derives DbCodec\n\n  val customCarRepo = Repo[CustomCar, CustomCar, Long]\n\n  test(\"count with manual table name\"):\n    val count = connect(xa())(customCarRepo.count)\n    assert(count == 3L)\nend sqlNameTests\n"
  },
  {
    "path": "magnum/src/test/scala/shared/TableInfoTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport munit.FunSuite\n\nimport java.time.OffsetDateTime\nimport java.util.UUID\n\ndef tableInfoTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(using\n    munit.Location,\n    DbCodec[UUID],\n    DbCodec[Boolean],\n    DbCodec[OffsetDateTime]\n): Unit =\n  import suite.*\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class Person(\n      id: Long,\n      firstName: Option[String],\n      lastName: String,\n      isAdmin: Boolean,\n      created: OffsetDateTime,\n      socialId: Option[UUID]\n  ) derives DbCodec\n\n  val person = TableInfo[Person, Person, Long]\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class Car(\n      model: String,\n      @Id id: Long,\n      topSpeed: Int,\n      @SqlName(\"vin\") vinNumber: Option[Int],\n      color: Color,\n      created: OffsetDateTime\n  ) derives DbCodec\n\n  val car = TableInfo[Car, Car, Long]\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class NoId(\n      createdAt: OffsetDateTime,\n      userName: String,\n      userAction: String\n  ) derives DbCodec\n\n  val noIdTableInfo = TableInfo[NoId, NoId, Null]\n\n  test(\"NoId TableInfo.idColumn == None\"):\n    assert(noIdTableInfo.idColumn == None)\n\n  test(\"can use TableInfo.idColumn to scrap boilerplate\"):\n    extension [EC: DbCodec, E, ID](table: TableInfo[EC, E, ID])\n      def onConflictDoUpdate(entityCreator: EC): Update =\n        val updatedCols = table.all.columnNames\n          .filterNot(col => table.idColumn.exists(_.scalaName == col.scalaName))\n          .map(col => sql\"$col = EXCLUDED.$col\")\n          .reduceLeft((a, b) => sql\"$a, $b\")\n        sql\"\"\"INSERT INTO $table ${table.insertColumns} VALUES ($entityCreator)\n             |ON CONFLICT DO UPDATE SET $updatedCols\n             |\"\"\".stripMargin.update\n\n    assertEquals(\n      person\n        .onConflictDoUpdate(\n          Person(1, None, \"Smith\", false, 
OffsetDateTime.now, None)\n        )\n        .frag\n        .sqlString,\n      \"\"\"INSERT INTO person (id, first_name, last_name, is_admin, created, social_id) VALUES (?, ?, ?, ?, ?, ?)\n        |ON CONFLICT DO UPDATE SET first_name = EXCLUDED.first_name, last_name = EXCLUDED.last_name, is_admin = EXCLUDED.is_admin, created = EXCLUDED.created, social_id = EXCLUDED.social_id\n        |\"\"\".stripMargin\n    )\n\n    assertEquals(\n      car\n        .onConflictDoUpdate(\n          Car(\"Tesla\", 2L, 123, None, Color.Red, OffsetDateTime.now)\n        )\n        .frag\n        .sqlString,\n      \"\"\"INSERT INTO car (model, id, top_speed, vin, color, created) VALUES (?, ?, ?, ?, ?, ?)\n        |ON CONFLICT DO UPDATE SET model = EXCLUDED.model, top_speed = EXCLUDED.top_speed, vin = EXCLUDED.vin, color = EXCLUDED.color, created = EXCLUDED.created\n        |\"\"\".stripMargin\n    )\n\n    assertEquals(\n      noIdTableInfo\n        .onConflictDoUpdate(NoId(OffsetDateTime.now, \"abc\", \"def\"))\n        .frag\n        .sqlString,\n      \"\"\"INSERT INTO no_id (created_at, user_name, user_action) VALUES (?, ?, ?)\n        |ON CONFLICT DO UPDATE SET created_at = EXCLUDED.created_at, user_name = EXCLUDED.user_name, user_action = EXCLUDED.user_action\n        |\"\"\".stripMargin\n    )\nend tableInfoTests\n"
  },
  {
    "path": "magnum/src/test/scala/shared/TupleTests.scala",
    "content": "package shared\n\nimport com.augustnagro.magnum.*\nimport munit.{FunSuite, Location}\n\nimport java.sql.{PreparedStatement, ResultSet}\nimport java.time.OffsetDateTime\n\ndef tupleTests(suite: FunSuite, dbType: DbType, xa: () => Transactor)(using\n    Location,\n    DbCodec[OffsetDateTime]\n): Unit =\n  import suite.*\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class Car(\n      model: String,\n      @Id id: Long,\n      topSpeed: Int,\n      @SqlName(\"vin\") vinNumber: Option[Int],\n      color: Color,\n      created: OffsetDateTime\n  ) derives DbCodec\n\n  val car = TableInfo[Car, Car, Long]\n\n  val car1 = Car(\n    model = \"Ferrari F8 Tributo\",\n    id = 2L,\n    topSpeed = 212,\n    vinNumber = Some(124),\n    color = Color.Green,\n    created = OffsetDateTime.parse(\"2024-11-24T22:17:31.000000000Z\")\n  )\n\n  test(\"large tuple support does not override hand-rolled Tuple[2-4] codecs\"):\n    val tuple2ACodec = summon[DbCodec[(String, Color)]]\n    val tuple2BCodec = summon[DbCodec[(String, Int)]]\n    assert(tuple2ACodec.getClass == tuple2BCodec.getClass)\n    val tuple5ACodec = summon[DbCodec[(String, Color, Int, Long, Option[Int])]]\n    assert(tuple5ACodec.getClass != tuple2ACodec.getClass)\n    val tuple5BCodec = summon[DbCodec[(Int, Int, Int, Long, Option[Int])]]\n    assert(tuple5BCodec.getClass != tuple5ACodec.getClass)\n\n  test(\"large tuple select\"):\n    val tuple = xa().connect:\n      sql\"select model, color, top_speed, id, vin from car where id = 2\"\n        .query[(String, Color, Int, Long, Option[Int])]\n        .run()\n        .head\n    assert(\n      tuple == (car1.model, car1.color, car1.topSpeed, car1.id, car1.vinNumber)\n    )\n\n  test(\"large tuple select option\"):\n    assume(dbType != OracleDbType)\n    val tupleA = xa().connect:\n      sql\"select model, color, top_speed, id, vin from car where id = 1\"\n        .query[Option[(String, Color, Int, Long, Option[Int])]]\n        .run()\n        
.head\n    assert(tupleA.isDefined)\n    val someTuple = xa().connect:\n      sql\"select 1, 1, 1, 1, 1, 1\"\n        .query[Option[(Int, Int, Int, Int, Int, Int)]]\n        .run()\n        .head\n    assert(someTuple.isDefined)\n    val noneTuple = xa().connect:\n      sql\"select 1, 1, 1, 1, null, 1\"\n        .query[Option[(Int, Int, Int, Int, Int, Int)]]\n        .run()\n        .head\n    assert(noneTuple.isEmpty)\n    val optionTupleOption = xa().connect:\n      sql\"select 1, 1, 1, 1, null, 1\"\n        .query[Option[(Int, Int, Int, Int, Option[Int], Int)]]\n        .run()\n        .head\n    assert(optionTupleOption.isDefined)\n\n  test(\"large tuple write\"):\n    val tup =\n      (\"Ford Edge\", 4L, 101, Option(12345), Color.Red, OffsetDateTime.now)\n    val tupWithSome =\n      (\"Ford Edge\", 5L, 101, Some(12345), Color.Red, OffsetDateTime.now)\n    xa().connect:\n      sql\"insert into $car ${car.insertColumns} values $tup\".update.run()\n      sql\"insert into $car ${car.insertColumns} values $tupWithSome\".update\n        .run()\n      val res =\n        sql\"select * from $car where ${car.id} = 4\".query[Car].run().head\n      assert(res.color == Color.Red)\n\n  test(\"large tuple in large tuple\"):\n    assume(dbType != OracleDbType)\n    xa().connect:\n      val tuple = sql\"select 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12\"\n        .query[(Int, Int, (Int, Int, Int, Int, Int, Int), Int, Int, Int, Int)]\n        .run()\n      assert(tuple.nonEmpty)\n\nend tupleTests\n"
  },
  {
    "path": "magnum-pg/src/main/scala/com/augustnagro/magnum/pg/PgCodec.scala",
    "content": "package com.augustnagro.magnum.pg\n\nimport com.augustnagro.magnum.DbCodec\nimport org.postgresql.geometric.{\n  PGbox,\n  PGcircle,\n  PGline,\n  PGlseg,\n  PGpath,\n  PGpoint,\n  PGpolygon\n}\nimport org.postgresql.util.PGInterval\n\nimport java.sql\nimport java.sql.{JDBCType, PreparedStatement, ResultSet, Types}\nimport scala.reflect.ClassTag\nimport scala.collection.{IterableFactory, mutable as m}\nimport scala.compiletime.*\n\nobject PgCodec:\n\n  inline given ArrayCodec[A](using\n      aCodec: DbCodec[A],\n      aArrayCodec: SqlArrayCodec[A],\n      cTag: ClassTag[Array[A]]\n  ): DbCodec[Array[A]] =\n    inline erasedValue[A] match\n      // https://jdbc.postgresql.org/documentation/server-prepare/#arrays\n      case _: Short | _: java.lang.Short | _: Int | _: java.lang.Integer |\n          _: Long | _: java.lang.Long | _: Float | _: java.lang.Float |\n          _: Double | _: java.lang.Double | _: Boolean | _: java.lang.Boolean |\n          _: String | _: IArray[Byte] | _: Array[Byte] =>\n        arrayFastPath(aCodec, aArrayCodec, cTag)\n      case _ =>\n        arraySlowPath(aCodec, aArrayCodec, cTag)\n\n  inline given IArrayCodec[A](using\n      aCodec: DbCodec[A],\n      aArrayCodec: SqlArrayCodec[A],\n      cTag: ClassTag[IArray[A]]\n  ): DbCodec[IArray[A]] =\n    inline erasedValue[A] match\n      // https://jdbc.postgresql.org/documentation/server-prepare/#arrays\n      case _: Short | _: java.lang.Short | _: Int | _: java.lang.Integer |\n          _: Long | _: java.lang.Long | _: Float | _: java.lang.Float |\n          _: Double | _: java.lang.Double | _: Boolean | _: java.lang.Boolean |\n          _: String | _: IArray[Byte] | _: Array[Byte] =>\n        iArrayFastPath(aCodec, aArrayCodec, cTag)\n      case _ =>\n        iArraySlowPath(aCodec, aArrayCodec, cTag)\n\n  given SeqCodec[A](using\n      aCodec: DbCodec[A],\n      aArrayCodec: SqlArrayCodec[A]\n  ): DbCodec[Seq[A]] = new DbCodec[Seq[A]]:\n    require(aCodec.cols.length == 
1)\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = aCodec.cols\n    def readSingle(resultSet: ResultSet, pos: Int): Seq[A] =\n      readCImpl(Seq, resultSet, pos)\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[Seq[A]] =\n      readCOptImpl(Seq, resultSet, pos)\n    def writeSingle(entity: Seq[A], ps: PreparedStatement, pos: Int): Unit =\n      writeCImpl(entity, ps, pos)\n\n  given ListCodec[A](using\n      aCodec: DbCodec[A],\n      aArrayCodec: SqlArrayCodec[A]\n  ): DbCodec[List[A]] = new DbCodec[List[A]]:\n    require(aCodec.cols.length == 1)\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = aCodec.cols\n    def readSingle(resultSet: ResultSet, pos: Int): List[A] =\n      readCImpl(List, resultSet, pos)\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[List[A]] =\n      readCOptImpl(List, resultSet, pos)\n    def writeSingle(entity: List[A], ps: PreparedStatement, pos: Int): Unit =\n      writeCImpl(entity, ps, pos)\n\n  given VectorCodec[A](using\n      aCodec: DbCodec[A],\n      aArrayCodec: SqlArrayCodec[A]\n  ): DbCodec[Vector[A]] = new DbCodec[Vector[A]]:\n    require(aCodec.cols.length == 1)\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = aCodec.cols\n    def readSingle(resultSet: ResultSet, pos: Int): Vector[A] =\n      readCImpl(Vector, resultSet, pos)\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[Vector[A]] =\n      readCOptImpl(Vector, resultSet, pos)\n    def writeSingle(entity: Vector[A], ps: PreparedStatement, pos: Int): Unit =\n      writeCImpl(entity, ps, pos)\n\n  given BufferCodec[A](using\n      aCodec: DbCodec[A],\n      aArrayCodec: SqlArrayCodec[A]\n  ): DbCodec[m.Buffer[A]] = new DbCodec[m.Buffer[A]]:\n    require(aCodec.cols.length == 1)\n    private val jdbcTypeName = JDBCType.valueOf(aCodec.cols.head).getName\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = aCodec.cols\n    def readSingle(resultSet: ResultSet, pos: Int): 
m.Buffer[A] =\n      readCImpl(m.Buffer, resultSet, pos)\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[m.Buffer[A]] =\n      readCOptImpl(m.Buffer, resultSet, pos)\n    def writeSingle(\n        entity: m.Buffer[A],\n        ps: PreparedStatement,\n        pos: Int\n    ): Unit =\n      writeCImpl(entity, ps, pos)\n\n  given PgBoxCodec: DbCodec[PGbox] with\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = IArray(Types.JAVA_OBJECT)\n    def readSingle(resultSet: ResultSet, pos: Int): PGbox =\n      resultSet.getObject(pos, classOf[PGbox])\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[PGbox] =\n      readOptImpl(this, resultSet, pos)\n    def writeSingle(entity: PGbox, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, entity)\n\n  given PgCircleCodec: DbCodec[PGcircle] with\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = IArray(Types.JAVA_OBJECT)\n    def readSingle(resultSet: ResultSet, pos: Int): PGcircle =\n      resultSet.getObject(pos, classOf[PGcircle])\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[PGcircle] =\n      readOptImpl(this, resultSet, pos)\n    def writeSingle(entity: PGcircle, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, entity)\n\n  given PgIntervalCodec: DbCodec[PGInterval] with\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = IArray(Types.JAVA_OBJECT)\n    def readSingle(resultSet: ResultSet, pos: Int): PGInterval =\n      resultSet.getObject(pos, classOf[PGInterval])\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[PGInterval] =\n      readOptImpl(this, resultSet, pos)\n    def writeSingle(entity: PGInterval, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, entity)\n\n  given PgLineCodec: DbCodec[PGline] with\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = IArray(Types.JAVA_OBJECT)\n    def readSingle(resultSet: ResultSet, pos: Int): PGline =\n      
resultSet.getObject(pos, classOf[PGline])\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[PGline] =\n      readOptImpl(this, resultSet, pos)\n    def writeSingle(entity: PGline, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, entity)\n\n  given PgLSegCodec: DbCodec[PGlseg] with\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = IArray(Types.JAVA_OBJECT)\n    def readSingle(resultSet: ResultSet, pos: Int): PGlseg =\n      resultSet.getObject(pos, classOf[PGlseg])\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[PGlseg] =\n      readOptImpl(this, resultSet, pos)\n    def writeSingle(entity: PGlseg, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, entity)\n\n  given PgPathCodec: DbCodec[PGpath] with\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = IArray(Types.JAVA_OBJECT)\n    def readSingle(resultSet: ResultSet, pos: Int): PGpath =\n      resultSet.getObject(pos, classOf[PGpath])\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[PGpath] =\n      readOptImpl(this, resultSet, pos)\n    def writeSingle(entity: PGpath, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, entity)\n\n  given PgPointCodec: DbCodec[PGpoint] with\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = IArray(Types.JAVA_OBJECT)\n    def readSingle(resultSet: ResultSet, pos: Int): PGpoint =\n      resultSet.getObject(pos, classOf[PGpoint])\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[PGpoint] =\n      readOptImpl(this, resultSet, pos)\n    def writeSingle(entity: PGpoint, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, entity)\n\n  given PgPolygonCodec: DbCodec[PGpolygon] with\n    def queryRepr: String = \"?\"\n    val cols: IArray[Int] = IArray(Types.JAVA_OBJECT)\n    def readSingle(resultSet: ResultSet, pos: Int): PGpolygon =\n      resultSet.getObject(pos, classOf[PGpolygon])\n    def readSingleOption(resultSet: 
ResultSet, pos: Int): Option[PGpolygon] =\n      readOptImpl(this, resultSet, pos)\n    def writeSingle(entity: PGpolygon, ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, entity)\n\n  private def iArrayFastPath[A](\n      aCodec: DbCodec[A],\n      aArrayCodec: SqlArrayCodec[A],\n      cTag: ClassTag[IArray[A]]\n  ): DbCodec[IArray[A]] = new DbCodec[IArray[A]]:\n    require(aCodec.cols.length == 1)\n\n    def queryRepr: String = \"?\"\n\n    val cols: IArray[Int] = aCodec.cols\n\n    def readSingle(resultSet: ResultSet, pos: Int): IArray[A] =\n      val jdbcArray = resultSet.getArray(pos)\n      try\n        val arr = aArrayCodec.readArray(jdbcArray.getArray)\n        IArray.unsafeFromArray(arr)\n      finally jdbcArray.free()\n\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[IArray[A]] =\n      val jdbcArray = resultSet.getArray(pos)\n      if resultSet.wasNull then None\n      else\n        try\n          val arr = aArrayCodec.readArray(jdbcArray.getArray)\n          Some(IArray.unsafeFromArray(arr))\n        finally jdbcArray.free()\n\n    def writeSingle(entity: IArray[A], ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, entity)\n\n  private def iArraySlowPath[A](\n      aCodec: DbCodec[A],\n      aArrayCodec: SqlArrayCodec[A],\n      cTag: ClassTag[IArray[A]]\n  ): DbCodec[IArray[A]] = new DbCodec[IArray[A]]:\n    require(aCodec.cols.length == 1)\n\n    def queryRepr: String = \"?\"\n\n    val cols: IArray[Int] = aCodec.cols\n\n    def readSingle(resultSet: ResultSet, pos: Int): IArray[A] =\n      val jdbcArray = resultSet.getArray(pos)\n      try\n        val arr = aArrayCodec.readArray(jdbcArray.getArray)\n        IArray.unsafeFromArray(arr)\n      finally jdbcArray.free()\n\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[IArray[A]] =\n      val jdbcArray = resultSet.getArray(pos)\n      if resultSet.wasNull then None\n      else\n        try\n          val arr = 
aArrayCodec.readArray(jdbcArray.getArray)\n          Some(IArray.unsafeFromArray(arr))\n        finally jdbcArray.free()\n\n    def writeSingle(entity: IArray[A], ps: PreparedStatement, pos: Int): Unit =\n      val arr = entity.iterator.map(aArrayCodec.toArrayObj).toArray\n      val jdbcArr =\n        ps.getConnection.createArrayOf(aArrayCodec.jdbcTypeName, arr)\n      ps.setArray(pos, jdbcArr)\n\n  private def arrayFastPath[A](\n      aCodec: DbCodec[A],\n      aArrayCodec: SqlArrayCodec[A],\n      cTag: ClassTag[Array[A]]\n  ): DbCodec[Array[A]] = new DbCodec[Array[A]]:\n    require(aCodec.cols.length == 1)\n\n    def queryRepr: String = \"?\"\n\n    val cols: IArray[Int] = aCodec.cols\n\n    def readSingle(resultSet: ResultSet, pos: Int): Array[A] =\n      val jdbcArray = resultSet.getArray(pos)\n      try aArrayCodec.readArray(jdbcArray.getArray)\n      finally jdbcArray.free()\n\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[Array[A]] =\n      val jdbcArray = resultSet.getArray(pos)\n      if resultSet.wasNull then None\n      else\n        try Some(aArrayCodec.readArray(jdbcArray.getArray))\n        finally jdbcArray.free()\n\n    def writeSingle(entity: Array[A], ps: PreparedStatement, pos: Int): Unit =\n      ps.setObject(pos, entity)\n\n  private def arraySlowPath[A](\n      aCodec: DbCodec[A],\n      aArrayCodec: SqlArrayCodec[A],\n      cTag: ClassTag[Array[A]]\n  ): DbCodec[Array[A]] = new DbCodec[Array[A]]:\n    require(aCodec.cols.length == 1)\n\n    def queryRepr: String = \"?\"\n\n    val cols: IArray[Int] = aCodec.cols\n\n    def readSingle(resultSet: ResultSet, pos: Int): Array[A] =\n      val jdbcArray = resultSet.getArray(pos)\n      try aArrayCodec.readArray(jdbcArray.getArray)\n      finally jdbcArray.free()\n\n    def readSingleOption(resultSet: ResultSet, pos: Int): Option[Array[A]] =\n      val jdbcArray = resultSet.getArray(pos)\n      if resultSet.wasNull then None\n      else\n        try 
Some(aArrayCodec.readArray(jdbcArray.getArray))\n        finally jdbcArray.free()\n\n    def writeSingle(entity: Array[A], ps: PreparedStatement, pos: Int): Unit =\n      val arr = entity.iterator.map(aArrayCodec.toArrayObj).toArray\n      val jdbcArr =\n        ps.getConnection.createArrayOf(aArrayCodec.jdbcTypeName, arr)\n      ps.setArray(pos, jdbcArr)\n\n  private inline def readCImpl[C[_], A](\n      factory: IterableFactory[C],\n      resultSet: ResultSet,\n      pos: Int\n  )(using arrayCodec: SqlArrayCodec[A]): C[A] =\n    val jdbcArray = resultSet.getArray(pos)\n    try\n      val arr = arrayCodec.readArray(jdbcArray.getArray)\n      factory.from(arr)\n    finally jdbcArray.free()\n\n  private inline def readCOptImpl[C[_], A](\n      factory: IterableFactory[C],\n      resultSet: ResultSet,\n      pos: Int\n  )(using arrayCodec: SqlArrayCodec[A]): Option[C[A]] =\n    val jdbcArray = resultSet.getArray(pos)\n    if resultSet.wasNull then None\n    else\n      try\n        val arr = arrayCodec.readArray(jdbcArray.getArray)\n        Some(factory.from(arr))\n      finally jdbcArray.free()\n\n  private inline def writeCImpl[C[_], A](\n      entity: Iterable[A],\n      ps: PreparedStatement,\n      pos: Int\n  )(using arrayCodec: SqlArrayCodec[A]): Unit =\n    val arr = entity.iterator.map(arrayCodec.toArrayObj).toArray\n    val jdbcArr =\n      ps.getConnection.createArrayOf(arrayCodec.jdbcTypeName, arr)\n    ps.setArray(pos, jdbcArr)\n\n  private inline def readOptImpl[A](\n      codec: DbCodec[A],\n      resultSet: ResultSet,\n      pos: Int\n  ): Option[A] =\n    val res = codec.readSingle(resultSet, pos)\n    if resultSet.wasNull then None\n    else Some(res)\n\nend PgCodec\n"
  },
  {
    "path": "magnum-pg/src/main/scala/com/augustnagro/magnum/pg/SqlArrayCodec.scala",
    "content": "package com.augustnagro.magnum.pg\n\nimport java.sql\nimport java.sql.JDBCType\nimport java.time.{OffsetDateTime, ZoneOffset}\nimport java.util.UUID\nimport scala.reflect.ClassTag\nimport scala.collection.mutable as m\nimport org.postgresql.core.Oid\n\n/** Typeclass for converting between raw JDBC Object arrays and type A */\ntrait SqlArrayCodec[A]:\n  def jdbcTypeName: String\n\n  /** Converts the raw JDBC array to an IArray[A] */\n  def readArray(array: Object): Array[A]\n\n  /** Maps entity A to an object for use in JDBC Array */\n  def toArrayObj(entity: A): Object\n\nobject SqlArrayCodec:\n\n  given AnySqlArrayCodec: SqlArrayCodec[Any] with\n    val jdbcTypeName: String = JDBCType.JAVA_OBJECT.getName\n    def readArray(array: Object): Array[Any] =\n      array.asInstanceOf[Array[Any]]\n    def toArrayObj(entity: Any): Object = entity.asInstanceOf[Object]\n\n  given StringSqlArrayCodec: SqlArrayCodec[String] with\n    val jdbcTypeName: String = JDBCType.VARCHAR.getName\n    def readArray(array: Object): Array[String] =\n      array.asInstanceOf[Array[String]]\n    def toArrayObj(entity: String): Object = entity\n\n  given BooleanSqlArrayCodec: SqlArrayCodec[Boolean] with\n    val jdbcTypeName: String = JDBCType.BOOLEAN.getName\n    def readArray(array: Object): Array[Boolean] =\n      array match\n        case boxed: Array[java.lang.Boolean] => boxed.map(Boolean.unbox)\n        case primitive: Array[Boolean]       => primitive\n    def toArrayObj(entity: Boolean): Object = Boolean.box(entity)\n\n  given ByteSqlArrayCodec: SqlArrayCodec[Byte] with\n    val jdbcTypeName: String = JDBCType.TINYINT.getName\n    def readArray(array: Object): Array[Byte] =\n      array match\n        case boxed: Array[java.lang.Byte] => boxed.map(Byte.unbox)\n        case primitive: Array[Byte]       => primitive\n    def toArrayObj(entity: Byte): Object = Byte.box(entity)\n\n  given ShortSqlArrayCodec: SqlArrayCodec[Short] with\n    val jdbcTypeName: String = 
JDBCType.SMALLINT.getName\n    def readArray(array: Object): Array[Short] =\n      array match\n        case boxed: Array[java.lang.Short] => boxed.map(Short.unbox)\n        case primitive: Array[Short]       => primitive\n    def toArrayObj(entity: Short): Object = Short.box(entity)\n\n  given IntSqlArrayCodec: SqlArrayCodec[Int] with\n    val jdbcTypeName: String = JDBCType.INTEGER.getName\n    def readArray(array: Object): Array[Int] =\n      array match\n        case boxed: Array[Integer] => boxed.map(Int.unbox)\n        case primitive: Array[Int] => primitive\n    def toArrayObj(entity: Int): Object = Int.box(entity)\n\n  given LongSqlArrayCodec: SqlArrayCodec[Long] with\n    val jdbcTypeName: String = JDBCType.BIGINT.getName\n    def readArray(array: Object): Array[Long] =\n      array match\n        case boxed: Array[java.lang.Long] => boxed.map(Long.unbox)\n        case primitive: Array[Long]       => primitive\n    def toArrayObj(entity: Long): Object = Long.box(entity)\n\n  given FloatSqlArrayCodec: SqlArrayCodec[Float] with\n    val jdbcTypeName: String = JDBCType.REAL.getName\n    def readArray(array: Object): Array[Float] =\n      array match\n        case boxed: Array[java.lang.Float] => boxed.map(Float.unbox)\n        case primitive: Array[Float]       => primitive\n    def toArrayObj(entity: Float): Object = Float.box(entity)\n\n  given DoubleSqlArrayCodec: SqlArrayCodec[Double] with\n    val jdbcTypeName: String = JDBCType.DOUBLE.getName\n    def readArray(array: Object): Array[Double] =\n      array match\n        case boxed: Array[java.lang.Double] => boxed.map(Double.unbox)\n        case primitive: Array[Double]       => primitive\n    def toArrayObj(entity: Double): Object = Double.box(entity)\n\n  given SqlDateSqlArrayCodec: SqlArrayCodec[sql.Date] with\n    val jdbcTypeName: String = JDBCType.DATE.getName\n    def readArray(array: Object): Array[sql.Date] =\n      array.asInstanceOf[Array[sql.Date]]\n    def toArrayObj(entity: sql.Date): 
Object = entity\n\n  given SqlTimeSqlArrayCodec: SqlArrayCodec[sql.Time] with\n    val jdbcTypeName: String = JDBCType.TIME.getName\n    def readArray(array: Object): Array[sql.Time] =\n      array.asInstanceOf[Array[sql.Time]]\n    def toArrayObj(entity: sql.Time): Object = entity\n\n  given SqlTimestampSqlArrayCodec: SqlArrayCodec[sql.Timestamp] with\n    val jdbcTypeName: String = JDBCType.TIMESTAMP.getName\n    def readArray(array: Object): Array[sql.Timestamp] =\n      array.asInstanceOf[Array[sql.Timestamp]]\n    def toArrayObj(entity: sql.Timestamp): Object = entity\n\n  given OffsetDateTimeSqlArrayCodec: SqlArrayCodec[OffsetDateTime] with\n    val jdbcTypeName: String = JDBCType.TIMESTAMP.getName\n    def readArray(array: Object): Array[OffsetDateTime] =\n      array\n        .asInstanceOf[Array[sql.Timestamp]]\n        .map(_.toInstant.atOffset(ZoneOffset.UTC))\n    def toArrayObj(entity: OffsetDateTime): Object =\n      sql.Timestamp.from(entity.toInstant)\n\n  given UUIDSqlArrayCodec: SqlArrayCodec[UUID] with\n    val jdbcTypeName: String = Oid.toString(Oid.UUID)\n    def readArray(array: Object): Array[UUID] =\n      array.asInstanceOf[Array[UUID]]\n    def toArrayObj(entity: UUID): Object = entity\n\n  given ArraySqlArrayCodec[A](using\n      aCodec: SqlArrayCodec[A],\n      cTag: ClassTag[Array[A]]\n  ): SqlArrayCodec[Array[A]] with\n    def jdbcTypeName: String = aCodec.jdbcTypeName\n    def readArray(array: Object): Array[Array[A]] =\n      val objArr = array.asInstanceOf[Array[Object]]\n      objArr.map(aCodec.readArray)\n    def toArrayObj(entity: Array[A]): Object =\n      entity.iterator.map(aCodec.toArrayObj).toArray\n\n  given IArraySqlArrayCodec[A](using\n      aCodec: SqlArrayCodec[A],\n      cTag: ClassTag[IArray[A]]\n  ): SqlArrayCodec[IArray[A]] with\n    def jdbcTypeName: String = aCodec.jdbcTypeName\n    def readArray(array: Object): Array[IArray[A]] =\n      val objArray = array.asInstanceOf[Array[Object]]\n      objArray.map(obj => 
IArray.unsafeFromArray(aCodec.readArray(obj)))\n    def toArrayObj(entity: IArray[A]): Object =\n      entity.iterator.map(aCodec.toArrayObj).toArray\n\n  given SeqSqlArrayCodec[A](using\n      aCodec: SqlArrayCodec[A]\n  ): SqlArrayCodec[Seq[A]] with\n    def jdbcTypeName: String = aCodec.jdbcTypeName\n    def readArray(array: Object): Array[Seq[A]] =\n      val objArray = array.asInstanceOf[Array[Object]]\n      objArray.map(obj => Seq.from(aCodec.readArray(obj)))\n    def toArrayObj(entity: Seq[A]): Object =\n      entity.iterator.map(aCodec.toArrayObj).toArray\n\n  given ListSqlArrayCodec[A](using\n      aCodec: SqlArrayCodec[A]\n  ): SqlArrayCodec[List[A]] with\n    def jdbcTypeName: String = aCodec.jdbcTypeName\n    def readArray(array: Object): Array[List[A]] =\n      val objArray = array.asInstanceOf[Array[Object]]\n      objArray.map(obj => List.from(aCodec.readArray(obj)))\n    def toArrayObj(entity: List[A]): Object =\n      entity.iterator.map(aCodec.toArrayObj).toArray\n\n  given VectorSqlArrayCodec[A](using\n      aCodec: SqlArrayCodec[A]\n  ): SqlArrayCodec[Vector[A]] with\n    def jdbcTypeName: String = aCodec.jdbcTypeName\n    def readArray(array: Object): Array[Vector[A]] =\n      val objArr = array.asInstanceOf[Array[Object]]\n      objArr.map(obj => Vector.from(aCodec.readArray(obj)))\n    def toArrayObj(entity: Vector[A]): Object =\n      entity.iterator.map(aCodec.toArrayObj).toArray\n\n  given BufferSqlArrayCodec[A](using\n      aCodec: SqlArrayCodec[A]\n  ): SqlArrayCodec[m.Buffer[A]] with\n    def jdbcTypeName: String = aCodec.jdbcTypeName\n    def readArray(array: Object): Array[m.Buffer[A]] =\n      val objArray = array.asInstanceOf[Array[Object]]\n      objArray.map(obj => m.Buffer.from(aCodec.readArray(obj)))\n    def toArrayObj(entity: m.Buffer[A]): Object =\n      entity.iterator.map(aCodec.toArrayObj).toArray\nend SqlArrayCodec\n"
  },
  {
    "path": "magnum-pg/src/main/scala/com/augustnagro/magnum/pg/enums/PgEnumDbCodec.scala",
    "content": "package com.augustnagro.magnum.pg.enums\n\nimport com.augustnagro.magnum.{DbCodec, DerivingUtil}\n\nimport java.sql.{JDBCType, PreparedStatement, ResultSet, Types}\nimport scala.deriving.Mirror\nimport scala.compiletime.{\n  constValue,\n  constValueTuple,\n  erasedValue,\n  error,\n  summonFrom,\n  summonInline\n}\nimport scala.quoted.*\nimport scala.reflect.ClassTag\n\ninline given PgEnumDbCodec[A <: scala.reflect.Enum: Mirror.SumOf]: DbCodec[A] =\n  ${ pgEnumDbCodecImpl[A] }\n\nprivate def pgEnumDbCodecImpl[A: Type](using Quotes): Expr[DbCodec[A]] =\n  import quotes.reflect.*\n  val mirror = Expr.summon[Mirror.SumOf[A]].getOrElse {\n    report.errorAndAbort(\n      \"Can only derive PgEnumDbCodec for simple (non ADT) Enums\"\n    )\n  }\n  mirror match\n    case '{\n          $ms: Mirror.SumOf[A] {\n            type MirroredElemTypes = mets\n            type MirroredElemLabels = mels\n            type MirroredLabel = mel\n          }\n        } =>\n      val nameMapExpr = DerivingUtil.buildSqlNameMapForEnum[A, mels, mets]\n      val melExpr = Expr(Type.valueOfConstant[mel].get.toString)\n      val sqlTypeNameExpr: Expr[String] =\n        DerivingUtil\n          .sqlTableNameAnnot[A]\n          .map(sqlNameExpr => '{ $sqlNameExpr.name })\n          .orElse(\n            DerivingUtil\n              .tableAnnot[A]\n              .map(tableExpr =>\n                '{ $tableExpr.nameMapper.toTableName($melExpr) }\n              )\n          )\n          .getOrElse(melExpr)\n      '{\n        new DbCodec[A] {\n          val nameMap: Seq[(String, A)] = $nameMapExpr\n          val cols: IArray[Int] = IArray(Types.VARCHAR)\n          def readSingle(rs: ResultSet, pos: Int): A =\n            val str = rs.getString(pos)\n            nameMap.find((name, _) => name == str) match\n              case Some((_, v)) => v\n              case None =>\n                throw IllegalArgumentException(\n                  str + \" not convertible to \" + $melExpr\n       
         )\n          def readSingleOption(rs: ResultSet, pos: Int): Option[A] =\n            Option(rs.getString(pos)).map(str =>\n              nameMap.find((name, _) => name == str) match\n                case Some((_, v)) => v\n                case None =>\n                  throw IllegalArgumentException(\n                    str + \" not convertible to \" + $melExpr\n                  )\n            )\n          def writeSingle(entity: A, ps: PreparedStatement, pos: Int): Unit =\n            nameMap.find((_, v) => v == entity) match\n              case Some((k, _)) => ps.setString(pos, k)\n              case None =>\n                throw IllegalArgumentException(\n                  entity.toString + \" not convertible to \" + $melExpr\n                )\n          def queryRepr: String = \"?::\" + ${ sqlTypeNameExpr }\n        }\n      }\n  end match\nend pgEnumDbCodecImpl\n"
  },
  {
    "path": "magnum-pg/src/main/scala/com/augustnagro/magnum/pg/enums/PgEnumToScalaEnumSqlArrayCodec.scala",
    "content": "package com.augustnagro.magnum.pg.enums\n\nimport com.augustnagro.magnum.DerivingUtil\nimport com.augustnagro.magnum.pg.SqlArrayCodec\n\nimport java.sql.JDBCType\nimport scala.deriving.Mirror\nimport scala.compiletime.{\n  constValue,\n  constValueTuple,\n  erasedValue,\n  error,\n  summonFrom,\n  summonInline\n}\nimport scala.quoted.*\nimport scala.reflect.ClassTag\n\n/** Supports mapping between Postgres Enums and Scala Enums in multi-dimensional\n  * arrays, such as between\n  * {{{create type Color as enum ('Red', 'Green', 'Blue');}}} and\n  * {{{\n  *   enum Color:\n  *     case Red, Green, Blue\n  * }}}\n  */\ninline given PgEnumToScalaEnumSqlArrayCodec[\n    A <: scala.reflect.Enum: Mirror.SumOf: ClassTag\n]: SqlArrayCodec[A] =\n  ${ pgEnumToScalaEnumSqlArrayCodecImpl[A] }\n\nprivate def pgEnumToScalaEnumSqlArrayCodecImpl[A: Type](using\n    Quotes\n): Expr[SqlArrayCodec[A]] =\n  import quotes.reflect.*\n  val mirror = Expr.summon[Mirror.SumOf[A]].getOrElse {\n    report.errorAndAbort(\n      \"Can only derive SqlArrayCodec for simple (non ADT) Enums\"\n    )\n  }\n  mirror match\n    case '{\n          $ms: Mirror.SumOf[A] {\n            type MirroredElemTypes = mets\n            type MirroredElemLabels = mels\n            type MirroredLabel = mel\n          }\n        } =>\n      val nameMapExpr = DerivingUtil.buildSqlNameMapForEnum[A, mels, mets]\n      val melExpr = Expr(Type.valueOfConstant[mel].get.toString)\n      val classTagExpr = Expr.summon[ClassTag[A]].get\n      val sqlTypeNameExpr: Expr[String] =\n        DerivingUtil\n          .sqlTableNameAnnot[A]\n          .map(sqlNameExpr => '{ $sqlNameExpr.name })\n          .orElse(\n            DerivingUtil\n              .tableAnnot[A]\n              .map(tableExpr =>\n                '{ $tableExpr.nameMapper.toTableName($melExpr) }\n              )\n          )\n          .getOrElse(melExpr)\n      '{\n        new SqlArrayCodec[A]:\n          val nameMap = $nameMapExpr\n          val 
ct = $classTagExpr\n          val jdbcTypeName: String = $sqlTypeNameExpr\n          def readArray(array: Object): Array[A] =\n            array\n              .asInstanceOf[Array[String]]\n              .map(enumName =>\n                nameMap.find((k, _) => k == enumName) match\n                  case Some((_, v)) => v\n                  case None =>\n                    throw IllegalArgumentException(\n                      enumName + \" not convertible to \" + $melExpr\n                    )\n              )(using ct)\n          def toArrayObj(entity: A): Object =\n            nameMap.find((_, v) => v == entity) match\n              case Some((k, _)) => k\n              case None =>\n                throw IllegalArgumentException(\n                  entity.toString + \" not convertible to \" + $melExpr\n                )\n      }\n  end match\nend pgEnumToScalaEnumSqlArrayCodecImpl\n"
  },
  {
    "path": "magnum-pg/src/main/scala/com/augustnagro/magnum/pg/enums/PgStringToScalaEnumSqlArrayCodec.scala",
    "content": "package com.augustnagro.magnum.pg.enums\n\nimport com.augustnagro.magnum.DerivingUtil\nimport com.augustnagro.magnum.pg.SqlArrayCodec\n\nimport java.sql.JDBCType\nimport scala.deriving.Mirror\nimport scala.compiletime.{\n  constValue,\n  constValueTuple,\n  erasedValue,\n  error,\n  summonFrom,\n  summonInline\n}\nimport scala.quoted.*\nimport scala.reflect.ClassTag\n\n/** Supports mapping between Postgres Strings (Varchar, text, etc) and Scala\n  * Enums in multi-dimensional arrays.\n  */\ninline given PgStringToScalaEnumSqlArrayCodec[\n    A <: scala.reflect.Enum: Mirror.SumOf: ClassTag\n]: SqlArrayCodec[A] =\n  ${ pgStringToScalaEnumSqlArrayCodecImpl[A] }\n\nprivate def pgStringToScalaEnumSqlArrayCodecImpl[A: Type](using\n    Quotes\n): Expr[SqlArrayCodec[A]] =\n  import quotes.reflect.*\n  val mirror = Expr.summon[Mirror.SumOf[A]].getOrElse {\n    report.errorAndAbort(\n      \"Can only derive SqlArrayCodec for simple (non ADT) Enums\"\n    )\n  }\n  mirror match\n    case '{\n          $ms: Mirror.SumOf[A] {\n            type MirroredElemTypes = mets\n            type MirroredElemLabels = mels\n            type MirroredLabel = mel\n          }\n        } =>\n      val nameMapExpr = DerivingUtil.buildSqlNameMapForEnum[A, mels, mets]\n      val melExpr = Expr(Type.valueOfConstant[mel].get.toString)\n      val classTagExpr = Expr.summon[ClassTag[A]].get\n      '{\n        new SqlArrayCodec[A]:\n          val nameMap = $nameMapExpr\n          val ct = $classTagExpr\n          val jdbcTypeName: String = JDBCType.VARCHAR.getName\n          def readArray(array: Object): Array[A] =\n            array\n              .asInstanceOf[Array[String]]\n              .map(enumName =>\n                nameMap.find((k, _) => k == enumName) match\n                  case Some((_, v)) => v\n                  case None =>\n                    throw IllegalArgumentException(\n                      enumName + \" not convertible to \" + $melExpr\n                    )\n 
             )(using ct)\n          def toArrayObj(entity: A): Object =\n            nameMap.find((_, v) => v == entity) match\n              case Some((k, _)) => k\n              case None =>\n                throw IllegalArgumentException(\n                  entity.toString + \" not convertible to \" + $melExpr\n                )\n      }\n  end match\nend pgStringToScalaEnumSqlArrayCodecImpl\n"
  },
  {
    "path": "magnum-pg/src/main/scala/com/augustnagro/magnum/pg/json/JsonBDbCodec.scala",
    "content": "package com.augustnagro.magnum.pg.json\n\nimport com.augustnagro.magnum.DbCodec\nimport org.postgresql.util.PGobject\n\nimport java.sql.{PreparedStatement, ResultSet, Types}\n\ntrait JsonBDbCodec[A] extends DbCodec[A]:\n\n  def encode(a: A): String\n\n  def decode(json: String): A\n\n  override def queryRepr: String = \"?\"\n\n  override val cols: IArray[Int] = IArray(Types.OTHER)\n\n  override def readSingle(resultSet: ResultSet, pos: Int): A =\n    decode(resultSet.getString(pos))\n\n  override def readSingleOption(resultSet: ResultSet, pos: Int): Option[A] =\n    val rawJson = resultSet.getString(pos)\n    if rawJson == null then None\n    else Some(decode(rawJson))\n\n  override def writeSingle(entity: A, ps: PreparedStatement, pos: Int): Unit =\n    val jsonObject = PGobject()\n    jsonObject.setType(\"jsonb\")\n    jsonObject.setValue(encode(entity))\n    ps.setObject(pos, jsonObject)\n\nend JsonBDbCodec\n"
  },
  {
    "path": "magnum-pg/src/main/scala/com/augustnagro/magnum/pg/json/JsonDbCodec.scala",
    "content": "package com.augustnagro.magnum.pg.json\n\nimport com.augustnagro.magnum.DbCodec\nimport org.postgresql.util.PGobject\n\nimport java.sql.{PreparedStatement, ResultSet, Types}\n\ntrait JsonDbCodec[A] extends DbCodec[A]:\n\n  def encode(a: A): String\n\n  def decode(json: String): A\n\n  override def queryRepr: String = \"?\"\n\n  override val cols: IArray[Int] = IArray(Types.OTHER)\n\n  override def readSingle(resultSet: ResultSet, pos: Int): A =\n    decode(resultSet.getString(pos))\n\n  override def readSingleOption(resultSet: ResultSet, pos: Int): Option[A] =\n    val rawJson = resultSet.getString(pos)\n    if rawJson == null then None\n    else Some(decode(rawJson))\n\n  override def writeSingle(entity: A, ps: PreparedStatement, pos: Int): Unit =\n    val jsonObject = PGobject()\n    jsonObject.setType(\"json\")\n    jsonObject.setValue(encode(entity))\n    ps.setObject(pos, jsonObject)\n\nend JsonDbCodec\n"
  },
  {
    "path": "magnum-pg/src/main/scala/com/augustnagro/magnum/pg/xml/XmlDbCodec.scala",
    "content": "package com.augustnagro.magnum.pg.xml\n\nimport com.augustnagro.magnum.DbCodec\nimport org.postgresql.util.PGobject\n\nimport java.sql.{PreparedStatement, ResultSet, Types}\n\ntrait XmlDbCodec[A] extends DbCodec[A]:\n\n  def encode(a: A): String\n\n  def decode(xml: String): A\n\n  override def queryRepr: String = \"?\"\n\n  override val cols: IArray[Int] = IArray(Types.SQLXML)\n\n  override def readSingle(resultSet: ResultSet, pos: Int): A =\n    decode(resultSet.getString(pos))\n\n  override def readSingleOption(resultSet: ResultSet, pos: Int): Option[A] =\n    val xmlString = resultSet.getString(pos)\n    if xmlString == null then None\n    else Some(decode(xmlString))\n\n  override def writeSingle(entity: A, ps: PreparedStatement, pos: Int): Unit =\n    val xmlObject = PGobject()\n    xmlObject.setType(\"xml\")\n    xmlObject.setValue(encode(entity))\n    ps.setObject(pos, xmlObject)\n\nend XmlDbCodec\n"
  },
  {
    "path": "magnum-pg/src/test/resources/pg-car.sql",
    "content": "drop table if exists mag_car;\n\ncreate table mag_car (\n    id bigint primary key,\n    text_colors text[] not null,\n    text_color_map text[][] not null,\n    last_service json,\n    my_json_b jsonb,\n    my_xml xml\n);\n\ninsert into mag_car values\n(1, '{\"red_orange\", \"Greenish\"}', '{{\"red_orange\", \"red_orange\"}, {\"Greenish\", \"Greenish\"}}', '{\"mechanic\": \"Bob\", \"date\": \"2024-05-04\"}', '{\"a\": [1, 2, 3], \"b\": \"hello world\"}', '<color>blue</color>'),\n(2, '{\"Greenish\", \"blue\"}', '{{\"red_orange\", \"Greenish\"}, {\"Greenish\", \"blue\"}}', null, null, null);\n"
  },
  {
    "path": "magnum-pg/src/test/resources/pg-service-list.sql",
    "content": "drop table if exists mag_service_list;\n\ncreate table mag_service_list (\n    id bigint primary key generated by default as identity,\n    service json,\n    created timestamptz default now()\n);\n"
  },
  {
    "path": "magnum-pg/src/test/resources/pg-user.sql",
    "content": "drop table if exists mag_user;\ndrop type if exists colour;\n\ncreate type Colour as enum ('red_orange', 'Greenish', 'blue');\n\ncreate table mag_user (\n    id bigint primary key,\n    name text not null,\n    friends text[] not null,\n    matrix integer[][] not null,\n    test integer[] not null,\n    dates timestamptz[] not null,\n    bx box not null,\n    c circle not null,\n    iv interval not null,\n    l line not null,\n    lSeg lseg not null,\n    p path not null,\n    pnt point not null,\n    poly polygon not null,\n    colors Colour[] not null,\n    colorMap Colour[][] not null,\n    color Colour not null,\n    idUuid uuid not null,\n    uuids uuid[] not null\n);\n\ninsert into mag_user values\n(1, 'Abby', '{\"Jane\", \"Mary\"}', '{{1, 2}, {3, 4}, {5, 6}}', '{1}', '{\"2023-07-30T12:21:36Z\", \"2023-07-30T12:21:37Z\"}', '(1, 2, 3, 4)', '<(1, 2), 3>', '1 hour', '{1, 1, 1}', '1, 1, 2, 2', '[(1, 1), (2, 2)]', '(1, 1)', '((0, 0), (-1, 1), (1, 1))', '{\"red_orange\", \"Greenish\"}', '{{\"red_orange\", \"red_orange\"}, {\"Greenish\", \"Greenish\"}}', 'blue', '00000000-0000-0000-0000-000000000001', '{\"00000000-0000-0001-0000-000000000000\", \"00000000-0000-0001-0000-000000000001\"}'),\n(2, 'Jacob', '{\"Grace\", \"Aubrey\"}', '{{7, 8}, {9, 10}}', '{}', '{}', '(5, 6, 7, 8)', '<(4, 5), 6>', '2 days', '{2, 2, 2}', '2, 2, 3, 3', '[(2, 2), (3, 3)]', '(2, 2)', '((0, 0), (-1, -1), (1, -1))', '{\"Greenish\", \"blue\"}', '{{\"red_orange\", \"Greenish\"}, {\"Greenish\", \"blue\"}}', 'blue', '00000000-0000-0000-0000-000000000002', '{\"00000000-0000-0002-0000-000000000000\"}');\n"
  },
  {
    "path": "magnum-pg/src/test/scala/CirceJsonBDbCodec.scala",
    "content": "import com.augustnagro.magnum.pg.json.JsonBDbCodec\nimport io.circe.{Codec, Decoder, Encoder, JsonObject}\nimport io.circe.parser.{decode as circeDecode, *}\nimport io.circe.syntax.*\n\ntrait CirceJsonBDbCodec[A] extends JsonBDbCodec[A]\n\nobject CirceJsonBDbCodec:\n  def derived[A: Encoder: Decoder]: CirceJsonBDbCodec[A] = new:\n    def encode(a: A): String = a.asJson.toString\n    def decode(json: String): A = circeDecode[A](json) match\n      case Right(a)  => a\n      case Left(err) => throw err\n"
  },
  {
    "path": "magnum-pg/src/test/scala/CirceJsonDbCodec.scala",
    "content": "import com.augustnagro.magnum.pg.json.JsonDbCodec\nimport io.circe.{Codec, Decoder, Encoder, JsonObject}\nimport io.circe.parser.{decode as circeDecode, *}\nimport io.circe.syntax.*\n\ntrait CirceJsonDbCodec[A] extends JsonDbCodec[A]\n\nobject CirceJsonDbCodec:\n\n  def derived[A: Encoder: Decoder]: CirceJsonDbCodec[A] = new:\n    def encode(a: A): String = a.asJson.toString\n    def decode(json: String): A = circeDecode[A](json) match\n      case Right(a)  => a\n      case Left(err) => throw err\n"
  },
  {
    "path": "magnum-pg/src/test/scala/Color.scala",
    "content": "import com.augustnagro.magnum.{\n  DbCodec,\n  PostgresDbType,\n  SqlName,\n  SqlNameMapper,\n  Table\n}\n\n@SqlName(\"colour\")\n@Table(PostgresDbType, SqlNameMapper.CamelToSnakeCase)\nenum Color derives DbCodec:\n  case RedOrange\n  @SqlName(\"Greenish\") case Green\n  case Blue\n"
  },
  {
    "path": "magnum-pg/src/test/scala/LastService.scala",
    "content": "import com.augustnagro.magnum.pg.json.JsonDbCodec\n\nimport io.circe.Codec\nimport java.time.LocalDate\n\ncase class LastService(mechanic: String, date: LocalDate)\n    derives Codec.AsObject, CirceJsonDbCodec\n"
  },
  {
    "path": "magnum-pg/src/test/scala/MagCar.scala",
    "content": "import com.augustnagro.magnum.{\n  DbCodec,\n  Id,\n  PostgresDbType,\n  SqlNameMapper,\n  Table\n}\nimport com.augustnagro.magnum.pg.PgCodec.given\nimport com.augustnagro.magnum.pg.enums.PgStringToScalaEnumSqlArrayCodec\n\n@Table(PostgresDbType, SqlNameMapper.CamelToSnakeCase)\ncase class MagCar(\n    @Id id: Long,\n    textColors: Seq[Color],\n    textColorMap: Vector[List[Color]],\n    lastService: Option[LastService],\n    myJsonB: Option[MyJsonB],\n    myXml: Option[MyXml]\n) derives DbCodec\n"
  },
  {
    "path": "magnum-pg/src/test/scala/MagUser.scala",
    "content": "import com.augustnagro.magnum.{DbCodec, Id, PostgresDbType, SqlName, Table}\nimport com.augustnagro.magnum.pg.PgCodec.given\nimport com.augustnagro.magnum.pg.enums.PgEnumToScalaEnumSqlArrayCodec\nimport com.augustnagro.magnum.pg.enums.PgEnumDbCodec\nimport org.postgresql.geometric.{\n  PGbox,\n  PGcircle,\n  PGline,\n  PGlseg,\n  PGpath,\n  PGpoint,\n  PGpolygon\n}\nimport org.postgresql.util.PGInterval\n\nimport java.time.OffsetDateTime\nimport java.util.Objects\nimport java.util.UUID\n\n@Table(PostgresDbType)\n@SqlName(\"mag_user\")\ncase class MagUser(\n    @Id id: Long,\n    name: String,\n    friends: Vector[String],\n    matrix: IArray[IArray[Int]],\n    test: IArray[Int],\n    dates: IArray[OffsetDateTime],\n    bx: PGbox,\n    c: PGcircle,\n    iv: PGInterval,\n    l: PGline,\n    lSeg: PGlseg,\n    p: PGpath,\n    pnt: PGpoint,\n    poly: PGpolygon,\n    colors: List[Color],\n    colorMap: List[Vector[Color]],\n    color: Color,\n    idUuid: UUID,\n    uuids: List[UUID]\n) derives DbCodec:\n  override def equals(obj: Any): Boolean =\n    obj match\n      case u: MagUser =>\n        id == u.id && name == u.name && friends == u.friends &&\n        Objects.deepEquals(matrix, u.matrix) &&\n        Objects.deepEquals(test, u.test) &&\n        Objects.deepEquals(dates, u.dates) &&\n        bx == u.bx && c == u.c && iv == u.iv && l == u.l && lSeg == u.lSeg &&\n        p == u.p && pnt == u.pnt && poly == u.poly &&\n        colors == u.colors && colorMap == u.colorMap && color == u.color &&\n        idUuid == u.idUuid && uuids == u.uuids\n      case _ => false\nend MagUser\n"
  },
  {
    "path": "magnum-pg/src/test/scala/MyJsonB.scala",
    "content": "import io.circe.Codec\n\ncase class MyJsonB(a: Vector[Int], b: String)\n    derives Codec.AsObject, CirceJsonBDbCodec\n"
  },
  {
    "path": "magnum-pg/src/test/scala/MyXml.scala",
    "content": "import com.augustnagro.magnum.DbCodec\nimport com.augustnagro.magnum.pg.xml.XmlDbCodec\n\nimport scala.xml.{Document, XML, Elem}\n\ncase class MyXml(elem: Elem)\n\nobject MyXml:\n  given XmlDbCodec[MyXml] with\n    def encode(a: MyXml): String = a.elem.toString\n    def decode(xml: String): MyXml = MyXml(XML.loadString(xml))\n"
  },
  {
    "path": "magnum-pg/src/test/scala/PgCodecTests.scala",
    "content": "import com.dimafeng.testcontainers.PostgreSQLContainer\nimport com.dimafeng.testcontainers.munit.fixtures.TestContainersFixtures\nimport munit.{AnyFixture, FunSuite}\nimport org.testcontainers.utility.DockerImageName\nimport org.postgresql.ds.PGSimpleDataSource\nimport org.postgresql.geometric.*\nimport com.augustnagro.magnum.*\nimport com.augustnagro.magnum.pg.PgCodec.given\nimport com.augustnagro.magnum.pg.enums.PgEnumToScalaEnumSqlArrayCodec\nimport org.postgresql.util.PGInterval\n\nimport java.nio.file.{Files, Path}\nimport java.time.{LocalDate, OffsetDateTime, ZoneOffset}\nimport java.util.Objects\nimport java.util.UUID\nimport javax.sql.DataSource\nimport scala.util.Using.Manager\n\nclass PgCodecTests extends FunSuite, TestContainersFixtures:\n  val userRepo = Repo[MagUser, MagUser, Long]\n\n  val allUsers = Vector(\n    MagUser(\n      id = 1L,\n      name = \"Abby\",\n      friends = Vector(\"Jane\", \"Mary\"),\n      matrix = IArray(IArray(1, 2), IArray(3, 4), IArray(5, 6)),\n      test = IArray(1),\n      dates = IArray(\n        OffsetDateTime.parse(\"2023-07-30T12:21:36Z\"),\n        OffsetDateTime.parse(\"2023-07-30T12:21:37Z\")\n      ),\n      bx = PGbox(1, 2, 3, 4),\n      c = PGcircle(1, 2, 3),\n      iv = PGInterval(\"1 hour\"),\n      l = PGline(1, 1, 1),\n      lSeg = PGlseg(1, 1, 2, 2),\n      p = PGpath(Array(PGpoint(1, 1), PGpoint(2, 2)), true),\n      pnt = PGpoint(1, 1),\n      poly = PGpolygon(Array(PGpoint(0, 0), PGpoint(-1, 1), PGpoint(1, 1))),\n      colors = List(Color.RedOrange, Color.Green),\n      colorMap = List(\n        Vector(Color.RedOrange, Color.RedOrange),\n        Vector(Color.Green, Color.Green)\n      ),\n      color = Color.Blue,\n      idUuid = UUID.fromString(\"00000000-0000-0000-0000-000000000001\"),\n      uuids = List(\n        UUID.fromString(\"00000000-0000-0001-0000-000000000000\"),\n        UUID.fromString(\"00000000-0000-0001-0000-000000000001\")\n      )\n    ),\n    MagUser(\n      id = 2L,\n  
    name = \"Jacob\",\n      friends = Vector(\"Grace\", \"Aubrey\"),\n      matrix = IArray(IArray(7, 8), IArray(9, 10)),\n      test = IArray.emptyIntIArray,\n      dates = IArray.empty,\n      bx = PGbox(5, 6, 7, 8),\n      c = PGcircle(4, 5, 6),\n      iv = PGInterval(\"2 days\"),\n      l = PGline(2, 2, 2),\n      lSeg = PGlseg(2, 2, 3, 3),\n      p = PGpath(Array(PGpoint(2, 2), PGpoint(3, 3)), true),\n      pnt = PGpoint(2, 2),\n      poly = PGpolygon(Array(PGpoint(0, 0), PGpoint(-1, -1), PGpoint(1, -1))),\n      colors = List(Color.Green, Color.Blue),\n      colorMap = List(\n        Vector(Color.RedOrange, Color.Green),\n        Vector(Color.Green, Color.Blue)\n      ),\n      color = Color.Blue,\n      idUuid = UUID.fromString(\"00000000-0000-0000-0000-000000000002\"),\n      uuids = List(\n        UUID.fromString(\"00000000-0000-0002-0000-000000000000\")\n      )\n    )\n  )\n\n  val carRepo = Repo[MagCar, MagCar, Long]\n\n  val allCars = Vector(\n    MagCar(\n      id = 1,\n      textColors = Seq(Color.RedOrange, Color.Green),\n      textColorMap = Vector(\n        List(Color.RedOrange, Color.RedOrange),\n        List(Color.Green, Color.Green)\n      ),\n      lastService = Some(LastService(\"Bob\", LocalDate.of(2024, 5, 4))),\n      myJsonB = Some(MyJsonB(Vector(1, 2, 3), \"hello world\")),\n      myXml = Some(MyXml(<color>blue</color>))\n    ),\n    MagCar(\n      id = 2,\n      textColors = Seq(Color.Green, Color.Blue),\n      textColorMap = Vector(\n        List(Color.RedOrange, Color.Green),\n        List(Color.Green, Color.Blue)\n      ),\n      lastService = None,\n      myJsonB = None,\n      myXml = None\n    )\n  )\n\n  test(\"select all MagUser\"):\n    connect(ds()):\n      assert(userRepo.findAll == allUsers)\n\n  test(\"select all MagCar\"):\n    connect(ds()):\n      assert(carRepo.findAll == allCars)\n\n  test(\"insert MagUser\"):\n    connect(ds()):\n      val u = MagUser(\n        id = 3L,\n        name = \"Matt\",\n        friends = 
Vector.empty,\n        matrix = IArray(IArray(1, 2), IArray(3, 4)),\n        test = IArray(4),\n        dates = IArray(OffsetDateTime.parse(\"2023-07-30T13:57:29.059335Z\")),\n        bx = PGbox(1, 2, 3, 4),\n        c = PGcircle(1, 1, 1),\n        iv = PGInterval(\"1 minute\"),\n        l = PGline(3, 4, 5),\n        lSeg = PGlseg(0, 0, -1, -1),\n        p = PGpath(Array(PGpoint(3, 3), PGpoint(4, 4)), true),\n        pnt = PGpoint(3, 4),\n        poly = PGpolygon(Array(PGpoint(0, 0), PGpoint(-1, 1), PGpoint(1, 1))),\n        colors = List(Color.Blue),\n        colorMap = List(Vector(Color.Blue), Vector(Color.Green)),\n        color = Color.Green,\n        idUuid = UUID.fromString(\"00000000-0000-0000-0000-000000000003\"),\n        uuids = List(\n          UUID.fromString(\"00000000-0000-0003-0000-000000000000\"),\n          UUID.fromString(\"00000000-0000-0003-0000-000000000001\")\n        )\n      )\n      userRepo.insert(u)\n      val dbU = userRepo.findById(3L).get\n      assert(dbU == u)\n\n  test(\"select MagUser where uuid in set\"):\n    connect(ds()):\n      val ids = Vector(\n        UUID.fromString(\"00000000-0000-0000-0000-000000000001\"),\n        UUID.fromString(\"00000000-0000-0000-0000-000000000002\")\n      )\n      val users =\n        sql\"SELECT * FROM mag_user WHERE idUuid = ANY($ids)\"\n          .query[MagUser]\n          .run()\n      assert(users == allUsers)\n\n  test(\"insert MagCar\"):\n    connect(ds()):\n      val c = MagCar(\n        id = 3L,\n        textColors = Vector(Color.RedOrange, Color.RedOrange),\n        textColorMap = Vector(\n          List(Color.RedOrange, Color.RedOrange),\n          List(Color.RedOrange, Color.RedOrange)\n        ),\n        lastService = Some(LastService(\"James\", LocalDate.of(1970, 4, 22))),\n        myJsonB = None,\n        myXml = None\n      )\n      carRepo.insert(c)\n      val dbC = carRepo.findById(3L).get\n      assert(dbC == c)\n\n  test(\"update MagUser arrays\"):\n    connect(ds()):\n      
val newMatrix = IArray(IArray(0, 0), IArray(0, 9))\n      sql\"UPDATE mag_user SET matrix = $newMatrix WHERE id = 2\".update\n        .run()\n      val newUser = userRepo.findById(2L).get\n      assert(Objects.deepEquals(newUser.matrix, newMatrix))\n\n  test(\"update MagCar arrays\"):\n    connect(ds()):\n      val newTextColorMap =\n        Vector(List(Color.Blue, Color.Blue), List(Color.Blue, Color.Blue))\n      sql\"UPDATE mag_car SET text_color_map = $newTextColorMap WHERE id = 2\".update\n        .run()\n      val newCar = carRepo.findById(2L).get\n      assert(newCar.textColorMap == newTextColorMap)\n\n  test(\"MagCar xml string values\"):\n    connect(ds()):\n      val found =\n        sql\"SELECT my_xml FROM mag_car\"\n          .query[Option[MyXml]]\n          .run()\n          .flatten\n          .map(_.elem.toString)\n      val expected = allCars.flatMap(_.myXml).map(_.elem.toString)\n      assert(found == expected)\n\n  test(\"where = ANY()\"):\n    connect(ds()):\n      val ids = Vector(1L, 2L)\n      val cars =\n        sql\"SELECT * FROM mag_car WHERE id = ANY($ids)\".query[MagCar].run()\n      assert(cars == allCars)\n\n  test(\"insert MagServiceList interpolated\"):\n    connect(ds()):\n      val service = LastService(\"James\", LocalDate.of(1970, 4, 22))\n      val frag = sql\"INSERT INTO mag_service_list (service) VALUES ($service)\"\n      assertEquals(\n        frag.sqlString,\n        \"INSERT INTO mag_service_list (service) VALUES (?)\"\n      )\n      frag.update.run()\n      assertEquals(\n        sql\"SELECT service FROM mag_service_list\".query[LastService].run().head,\n        service\n      )\n\n  val pgContainer = ForAllContainerFixture(\n    PostgreSQLContainer\n      .Def(dockerImageName = DockerImageName.parse(\"postgres:17.0\"))\n      .createContainer()\n  )\n\n  override def munitFixtures: Seq[AnyFixture[_]] =\n    super.munitFixtures :+ pgContainer\n\n  def ds(): DataSource =\n    val ds = PGSimpleDataSource()\n    val pg = 
pgContainer()\n    ds.setUrl(pg.jdbcUrl)\n    ds.setUser(pg.username)\n    ds.setPassword(pg.password)\n    val userSql =\n      Files.readString(Path.of(getClass.getResource(\"/pg-user.sql\").toURI))\n    val carSql =\n      Files.readString(Path.of(getClass.getResource(\"/pg-car.sql\").toURI))\n    val serviceListSql =\n      Files.readString(\n        Path.of(getClass.getResource(\"/pg-service-list.sql\").toURI)\n      )\n    Manager { use =>\n      val con = use(ds.getConnection)\n      val stmt = use(con.createStatement)\n      stmt.execute(userSql)\n      stmt.execute(carSql)\n      stmt.execute(serviceListSql)\n    }.get\n    ds\n  end ds\nend PgCodecTests\n"
  },
  {
    "path": "magnum-zio/src/main/scala/com/augustnagro/magnum/magzio/TransactorZIO.scala",
    "content": "package com.augustnagro.magnum.magzio\n\nimport com.augustnagro.magnum.{DbCon, DbTx, SqlException, SqlLogger}\nimport zio.{Task, Trace, UIO, URLayer, ZIO, ZLayer}\n\nimport java.sql.Connection\nimport javax.sql.DataSource\nimport scala.util.control.NonFatal\n\nclass TransactorZIO private (\n    dataSource: DataSource,\n    sqlLogger: SqlLogger,\n    connectionConfig: Connection => Unit\n):\n\n  def withSqlLogger(sqlLogger: SqlLogger): TransactorZIO =\n    new TransactorZIO(\n      dataSource,\n      sqlLogger,\n      connectionConfig\n    )\n\n  def withConnectionConfig(\n      connectionConfig: Connection => Unit\n  ): TransactorZIO =\n    new TransactorZIO(\n      dataSource,\n      sqlLogger,\n      connectionConfig\n    )\n\n  def connect[A](f: DbCon ?=> A)(using Trace): Task[A] =\n    ZIO.blocking(\n      ZIO.acquireReleaseWith(acquireConnection)(releaseConnection)(cn =>\n        ZIO.attempt {\n          connectionConfig(cn)\n          f(using DbCon(cn, sqlLogger))\n        }\n      )\n    )\n\n  def transact[A](f: DbTx ?=> A)(using Trace): Task[A] =\n    ZIO.blocking(\n      ZIO.acquireReleaseWith(acquireConnection)(releaseConnection)(cn =>\n        ZIO.attempt {\n          connectionConfig(cn)\n          cn.setAutoCommit(false)\n          try\n            val res = f(using DbTx(cn, sqlLogger))\n            cn.commit()\n            res\n          catch\n            case NonFatal(t) =>\n              try cn.rollback()\n              catch { case t2 => t.addSuppressed(t2) }\n              throw t\n        }.uninterruptible\n      )\n    )\n\n  private def acquireConnection(using Trace): Task[Connection] =\n    ZIO\n      .attempt(dataSource.getConnection())\n      .mapError(t => SqlException(\"Unable to acquire DB Connection\", t))\n\n  private def releaseConnection(con: Connection)(using Trace): UIO[Unit] =\n    if con eq null then ZIO.unit\n    else\n      ZIO\n        .attempt(con.close())\n        .orDieWith(t =>\n          
SqlException(\"Unable to close DB Connection, will die\", t)\n        )\nend TransactorZIO\n\nobject TransactorZIO:\n  private val noOpConnectionConfig: Connection => Unit = _ => ()\n\n  /** Construct a TransactorZIO\n    *\n    * @param sqlLogger\n    *   Logging configuration\n    * @param connectionConfig\n    *   Customize the underlying JDBC Connections\n    */\n  def layer(\n      sqlLogger: SqlLogger,\n      connectionConfig: Connection => Unit\n  ): URLayer[DataSource, TransactorZIO] =\n    ZLayer\n      .service[DataSource]\n      .project(ds =>\n        TransactorZIO(\n          dataSource = ds,\n          sqlLogger = sqlLogger,\n          connectionConfig = connectionConfig\n        )\n      )\n\n  /** Construct a TransactorZIO\n    *\n    * @param sqlLogger\n    *   Logging configuration\n    */\n  def layer(sqlLogger: SqlLogger): URLayer[DataSource, TransactorZIO] =\n    layer(\n      sqlLogger = sqlLogger,\n      connectionConfig = noOpConnectionConfig\n    )\n\n  /** Construct a TransactorZIO */\n  def layer: URLayer[DataSource, TransactorZIO] =\n    layer(\n      sqlLogger = SqlLogger.Default,\n      connectionConfig = noOpConnectionConfig\n    )\n\n  /** Construct a TransactorZIO\n    *\n    * @param connectionConfig\n    *   Customize the underlying JDBC Connections\n    */\n  def layer(\n      connectionConfig: Connection => Unit\n  ): URLayer[DataSource, TransactorZIO] =\n    layer(\n      sqlLogger = SqlLogger.Default,\n      connectionConfig = connectionConfig\n    )\n\nend TransactorZIO\n"
  },
  {
    "path": "magnum-zio/src/test/resources/pg/big-dec.sql",
    "content": "drop table if exists big_dec cascade;\n\ncreate table big_dec (\n    id int primary key,\n    my_big_dec numeric\n);\n\ninsert into big_dec values\n(1, 123),\n(2, null);"
  },
  {
    "path": "magnum-zio/src/test/resources/pg/car.sql",
    "content": "DROP TABLE IF EXISTS car;\n\nCREATE TABLE car (\n    model VARCHAR(50) NOT NULL,\n    id bigint PRIMARY KEY,\n    top_speed INT NOT NULL,\n    vin INT,\n    color TEXT NOT NULL CHECK (color IN ('Red', 'Green', 'Blue')),\n    created TIMESTAMP WITH TIME ZONE NOT NULL\n);\n\nINSERT INTO car (model, id, top_speed, vin, color, created) VALUES\n('McLaren Senna', 1, 208, 123, 'Red', '2024-11-24T22:17:30.000000000Z'::timestamptz),\n('Ferrari F8 Tributo', 2, 212, 124, 'Green', '2024-11-24T22:17:31.000000000Z'::timestamptz),\n('Aston Martin Superleggera', 3, 211, null, 'Blue', '2024-11-24T22:17:32.000000000Z'::timestamptz);\n"
  },
  {
    "path": "magnum-zio/src/test/resources/pg/my-user.sql",
    "content": "drop table if exists my_user cascade;\n\ncreate table my_user (\n    first_name text not null,\n    id bigint primary key generated always as identity\n);\n\ninsert into my_user (first_name) values\n('George'),\n('Alexander'),\n('John');\n"
  },
  {
    "path": "magnum-zio/src/test/resources/pg/no-id.sql",
    "content": "drop table if exists no_id;\n\ncreate table no_id (\n    created_at timestamptz not null default now(),\n    user_name text not null,\n    user_action text not null\n);\n\ninsert into no_id values\n(timestamp '1997-08-15', 'Josh', 'clicked a button'),\n(timestamp '1997-08-16', 'Danny', 'opened a toaster'),\n(timestamp '1997-08-17', 'Greg', 'ran some QA tests');\n"
  },
  {
    "path": "magnum-zio/src/test/resources/pg/person.sql",
    "content": "drop table if exists person cascade;\n\ncreate table person (\n    id bigint primary key,\n    first_name varchar(50),\n    last_name varchar(50) not null,\n    is_admin boolean not null,\n    created timestamptz not null,\n    social_id UUID\n);\n\ninsert into person (id, first_name, last_name, is_admin, created, social_id) values\n(1, 'George', 'Washington', true, now(), 'd06443a6-3efb-46c4-a66a-a80a8a9a5388'),\n(2, 'Alexander', 'Hamilton', true, now(), '529b6c6d-7228-4da5-81d7-13b706f78ddb'),\n(3, 'John', 'Adams', true, now(), null),\n(4, 'Benjamin', 'Franklin', true, now(), null),\n(5, 'John', 'Jay', true, now(), null),\n(6, 'Thomas', 'Jefferson', true, now(), null),\n(7, 'James', 'Madison', true, now(), null),\n(8, null, 'Nagro', false, timestamp '1997-08-12', null);\n"
  },
  {
    "path": "magnum-zio/src/test/scala/com/augustnagro/magnum/magzio/ImmutableRepoZioTests.scala",
    "content": "package com.augustnagro.magnum.magzio\n\nimport com.augustnagro.magnum.*\nimport munit.{FunSuite, Location}\nimport zio.*\n\nimport java.sql.Connection\nimport java.time.OffsetDateTime\nimport scala.util.{Success, Using}\n\ndef immutableRepoZioTests(\n    suite: FunSuite,\n    dbType: DbType,\n    xa: () => TransactorZIO\n)(using\n    Location,\n    DbCodec[OffsetDateTime]\n): Unit =\n  import suite.*\n\n  val runtime: Runtime[Any] = zio.Runtime.default\n\n  def runIO[A](io: ZIO[Any, Throwable, A]): A =\n    Unsafe.unsafe { implicit unsafe =>\n      runtime.unsafe.run(io).getOrThrow()\n    }\n\n  enum Color derives DbCodec:\n    case Red, Green, Blue\n\n  @Table(dbType, SqlNameMapper.CamelToSnakeCase)\n  case class Car(\n      model: String,\n      @Id id: Long,\n      topSpeed: Int,\n      @SqlName(\"vin\") vinNumber: Option[Int],\n      color: Color,\n      created: OffsetDateTime\n  ) derives DbCodec\n\n  val carRepo = ImmutableRepo[Car, Long]\n  val car = TableInfo[Car, Car, Long]\n\n  val allCars = Vector(\n    Car(\n      model = \"McLaren Senna\",\n      id = 1L,\n      topSpeed = 208,\n      vinNumber = Some(123),\n      color = Color.Red,\n      created = OffsetDateTime.parse(\"2024-11-24T22:17:30.000000000Z\")\n    ),\n    Car(\n      model = \"Ferrari F8 Tributo\",\n      id = 2L,\n      topSpeed = 212,\n      vinNumber = Some(124),\n      color = Color.Green,\n      created = OffsetDateTime.parse(\"2024-11-24T22:17:31.000000000Z\")\n    ),\n    Car(\n      model = \"Aston Martin Superleggera\",\n      id = 3L,\n      topSpeed = 211,\n      vinNumber = None,\n      color = Color.Blue,\n      created = OffsetDateTime.parse(\"2024-11-24T22:17:32.000000000Z\")\n    )\n  )\n\n  test(\"count\"):\n    val count =\n      runIO:\n        xa().connect:\n          carRepo.count\n    assert(count == 3L)\n\n  test(\"existsById\"):\n    val (exists3, exists4) =\n      runIO:\n        xa().connect:\n          carRepo.existsById(3L) -> 
carRepo.existsById(4L)\n    assert(exists3)\n    assert(!exists4)\n\n  test(\"findAll\"):\n    val cars =\n      runIO:\n        xa().connect:\n          carRepo.findAll\n    assert(cars == allCars)\n\n  test(\"findById\"):\n    val (exists3, exists4) =\n      runIO:\n        xa().connect:\n          carRepo.findById(3L) -> carRepo.findById(4L)\n    assert(exists3.get == allCars.last)\n    assert(exists4 == None)\n\n  test(\"findAllByIds\"):\n    assume(dbType != ClickhouseDbType)\n    assume(dbType != MySqlDbType)\n    assume(dbType != OracleDbType)\n    assume(dbType != SqliteDbType)\n    val ids =\n      runIO:\n        xa().connect:\n          carRepo.findAllById(Vector(1L, 3L)).map(_.id)\n    assert(ids == Vector(1L, 3L))\n\n  test(\"serializable transaction\"):\n    val count =\n      runIO:\n        xa()\n          .withConnectionConfig(withSerializable)\n          .transact:\n            carRepo.count\n    assert(count == 3L)\n\n  def withSerializable(con: Connection): Unit =\n    con.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE)\n\n  test(\"select query\"):\n    val minSpeed: Int = 210\n    val query =\n      sql\"select ${car.all} from $car where ${car.topSpeed} > $minSpeed\"\n        .query[Car]\n    val result =\n      runIO:\n        xa().connect:\n          query.run()\n    assertNoDiff(\n      query.frag.sqlString,\n      \"select model, id, top_speed, vin, color, created from car where top_speed > ?\"\n    )\n    assert(query.frag.params == Vector(minSpeed))\n    assert(result == allCars.tail)\n\n  test(\"select query with aliasing\"):\n    val minSpeed = 210\n    val cAlias = car.alias(\"c\")\n    val query =\n      sql\"select ${cAlias.all} from $cAlias where ${cAlias.topSpeed} > $minSpeed\"\n        .query[Car]\n    val result =\n      runIO:\n        xa().connect:\n          query.run()\n    assertNoDiff(\n      query.frag.sqlString,\n      \"select c.model, c.id, c.top_speed, c.vin, c.color, c.created from car c where c.top_speed 
> ?\"\n    )\n    assert(query.frag.params == Vector(minSpeed))\n    assert(result == allCars.tail)\n\n  test(\"select via option\"):\n    val vin = Option(124)\n    val cars =\n      runIO:\n        xa().connect:\n          sql\"select * from car where vin = $vin\"\n            .query[Car]\n            .run()\n    assert(cars == allCars.filter(_.vinNumber == vin))\n\n  test(\"tuple select\"):\n    val tuples =\n      runIO:\n        xa().connect:\n          sql\"select model, color from car where id = 2\"\n            .query[(String, Color)]\n            .run()\n    assert(tuples == Vector(allCars(1).model -> allCars(1).color))\n\n  test(\"large tuple support does not override hand-rolled Tuple[2-4] codecs\"):\n    val tuple2ACodec = summon[DbCodec[(String, Color)]]\n    val tuple2BCodec = summon[DbCodec[(String, Int)]]\n    assert(tuple2ACodec.getClass == tuple2BCodec.getClass)\n    val tuple5ACodec = summon[DbCodec[(String, Color, Int, Long, Option[Int])]]\n    assert(tuple5ACodec.getClass != tuple2ACodec.getClass)\n    val tuple5BCodec = summon[DbCodec[(Int, Int, Int, Long, Option[Int])]]\n    assert(tuple5BCodec.getClass != tuple5ACodec.getClass)\n\n  test(\"large tuple select\"):\n    val tuple = runIO:\n      xa().connect:\n        sql\"select model, color, top_speed, id, vin from car where id = 2\"\n          .query[(String, Color, Int, Long, Option[Int])]\n          .run()\n          .head\n    val c = allCars(1)\n    assert(tuple == (c.model, c.color, c.topSpeed, c.id, c.vinNumber))\n\n  test(\"reads null int as None and not Some(0)\"):\n    val maybeCar =\n      runIO:\n        xa().connect:\n          carRepo.findById(3L)\n    assert(maybeCar.get.vinNumber == None)\n\n  test(\"created timestamps should match\"):\n    val allCars =\n      runIO:\n        xa().connect:\n          carRepo.findAll\n    assert(allCars.map(_.created) == allCars.map(_.created))\n\n  test(\".query iterator\"):\n    val carsCount =\n      runIO:\n        xa().connect:\n          
Using.Manager(implicit use =>\n            val it = sql\"SELECT * FROM car\".query[Car].iterator()\n            it.map(_.id).size\n          )\n    assert(carsCount == Success(3))\n\nend immutableRepoZioTests\n"
  },
  {
    "path": "magnum-zio/src/test/scala/com/augustnagro/magnum/magzio/PgZioTests.scala",
    "content": "package com.augustnagro.magnum.magzio\n\nimport com.augustnagro.magnum.*\nimport com.dimafeng.testcontainers.PostgreSQLContainer\nimport com.dimafeng.testcontainers.munit.fixtures.TestContainersFixtures\nimport munit.{AnyFixture, FunSuite, Location}\nimport org.postgresql.ds.PGSimpleDataSource\nimport org.testcontainers.utility.DockerImageName\nimport zio.{Scope, Unsafe, ZLayer}\n\nimport java.nio.file.{Files, Path}\nimport scala.util.Using\nimport scala.util.Using.Manager\n\nclass PgZioTests extends FunSuite, TestContainersFixtures:\n\n  immutableRepoZioTests(this, PostgresDbType, xa)\n\n  val pgContainer = ForAllContainerFixture(\n    PostgreSQLContainer\n      .Def(dockerImageName = DockerImageName.parse(\"postgres:17.0\"))\n      .createContainer()\n  )\n\n  override def munitFixtures: Seq[AnyFixture[_]] =\n    super.munitFixtures :+ pgContainer\n\n  def xa(): TransactorZIO =\n    val ds = PGSimpleDataSource()\n    val pg = pgContainer()\n    ds.setUrl(pg.jdbcUrl)\n    ds.setUser(pg.username)\n    ds.setPassword(pg.password)\n    val tableDDLs = Vector(\n      \"/pg/car.sql\",\n      \"/pg/person.sql\",\n      \"/pg/my-user.sql\",\n      \"/pg/no-id.sql\",\n      \"/pg/big-dec.sql\"\n    ).map(p => Files.readString(Path.of(getClass.getResource(p).toURI)))\n\n    Manager(use =>\n      val con = use(ds.getConnection)\n      val stmt = use(con.createStatement)\n      for ddl <- tableDDLs do stmt.execute(ddl)\n    ).get\n    // todo unsafe\n    Unsafe.unsafe { implicit unsafe =>\n      zio.Runtime.default.unsafe\n        .run(\n          TransactorZIO.layer\n            .build(Scope.global)\n            .map(_.get)\n            .provide(\n              ZLayer.succeed(ds) ++ zio.Runtime.enableLoomBasedBlockingExecutor\n            )\n        )\n        .getOrThrow()\n    }\n  end xa\nend PgZioTests\n"
  },
  {
    "path": "project/build.properties",
    "content": "sbt.version=1.12.8\n"
  },
  {
    "path": "project/plugins.sbt",
    "content": "addSbtPlugin(\"org.scalameta\" % \"sbt-scalafmt\" % \"2.5.2\")\naddSbtPlugin(\"com.github.sbt\" % \"sbt-pgp\" % \"2.3.0\")\n"
  }
]