[
  {
    "path": ".circleci/config.yml",
    "content": "version: 2.1\norbs:\n  ruby: circleci/ruby@1.4.0\n\njobs:\n  test:\n    parallelism: 3\n    parameters:\n      ruby-version:\n        type: string\n      bundle-version:\n        type: string\n\n    docker:\n      - image: cimg/ruby:<< parameters.ruby-version >>\n      - image: cimg/postgres:14.6\n        environment:\n          POSTGRES_USER: postgres\n          POSTGRES_PASSWORD: torque\n          POSTGRES_DB: torque_postgresql\n\n    steps:\n      - checkout\n      - run: ruby --version\n      - run:\n          command: 'bundle install --gemfile gemfiles/<< parameters.bundle-version >>'\n          name: Install Bundle\n      - run:\n          command: dockerize -wait tcp://localhost:5432 -timeout 1m\n          name: Wait for DB\n      - run:\n          command: 'bundle exec --gemfile gemfiles/<< parameters.bundle-version >> rspec'\n          name: Run Tests\n          environment:\n            DATABASE_URL: 'postgresql://postgres:torque@localhost/torque_postgresql'\n\nreferences:\n  matrix_build: &matrix_build\n    test:\n      matrix:\n        parameters:\n          ruby-version: ['3.2', '3.3', '3.4']\n          bundle-version: ['Gemfile.rails-8.0']\n\nworkflows:\n  commit:\n    jobs:\n      - <<: *matrix_build\n"
  },
  {
    "path": ".github/FUNDING.yml",
    "content": "# These are supported funding model platforms\n\ngithub: ['crashtech']\n# patreon: # Replace with a single Patreon username\n# open_collective: # Replace with a single Open Collective username\n# ko_fi: # Replace with a single Ko-fi username\n# tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel\n# community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry\n# liberapay: # Replace with a single Liberapay username\n# issuehunt: # Replace with a single IssueHunt username\n# lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry\n# polar: # Replace with a single Polar username\n# buy_me_a_coffee: # Replace with a single Buy Me a Coffee username\n# thanks_dev: # Replace with a single thanks.dev username\n# custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']\n"
  },
  {
    "path": ".gitignore",
    "content": ".env\n*.gem\n*.rbc\n.bundle\n.config\n.yardoc\n.byebug_history\n.versions.conf\nGemfile.lock\ncoverage/\ndoc/\nInstalledFiles\npkg/\nrdoc/\nspec/reports/\nspec/examples.txt\ntest/tmp/\ntest/version_tmp/\ntmp/\nbin/\n.ruby-version\n.ruby-gemset\ngemfiles/*.lock\n"
  },
  {
    "path": ".rspec",
    "content": "--color\n--require spec_helper\n"
  },
  {
    "path": "Gemfile",
    "content": "source 'https://rubygems.org'\n\n# Declare your gem's dependencies in torque_postgresql.gemspec.\n# Bundler will treat runtime dependencies like base dependencies, and\n# development dependencies will be added by default to the :development group.\ngemspec\n\n# Declare any dependencies that are still in development here instead of in\n# your gemspec. These might include edge Rails or gems from your path or\n# Git. Remember to move these dependencies to your gemspec before releasing\n# your gem to rubygems.org.\n\n# To use a debugger\ngem 'debug'\n\n# Optional dependencies\ngem 'annotate'\n"
  },
  {
    "path": "MIT-LICENSE",
    "content": "Copyright 2016 Carlos Silva\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\nNONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\nLIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\nOF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\nWITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
  },
  {
    "path": "README.md",
    "content": "<a href=\"https://github.com/crashtech/torque-postgresql\">\n  <img src=\"./docs/assets/images/github.png\" alt=\"Torque PostgreSQL - Advanced PG features in a seamlessly RoR interface\" />\n</a>\n\n[![CircleCI](https://circleci.com/gh/crashtech/torque-postgresql/tree/master.svg?style=svg)](https://circleci.com/gh/crashtech/torque-postgresql/tree/master)\n[![Code Climate](https://codeclimate.com/github/crashtech/torque-postgresql/badges/gpa.svg)](https://codeclimate.com/github/crashtech/torque-postgresql)\n[![Gem Version](https://badge.fury.io/rb/torque-postgresql.svg)](https://badge.fury.io/rb/torque-postgresql)\n<!--([![Test Coverage](https://codeclimate.com/github/crashtech/torque-postgresql/badges/coverage.svg)](https://codeclimate.com/github/crashtech/torque-postgresql/coverage))-->\n<!--([![Dependency Status](https://gemnasium.com/badges/github.com/crashtech/torque-postgresql.svg)](https://gemnasium.com/github.com/crashtech/torque-postgresql))-->\n\n* [Wiki](https://github.com/crashtech/torque-postgresql/wiki)\n* [Bugs](https://github.com/crashtech/torque-postgresql/issues)\n* [TODO](https://github.com/crashtech/torque-postgresql/wiki/TODO)\n\n# Description\n`torque-postgresql` is a plugin that enhances Ruby on Rails enabling easy access to existing PostgreSQL advanced resources, such as data types and query statements. Its features are designed to be similar to Rails architecture and work as smoothly as possible.\n\nFully compatible with `schema.rb` and 100% plug-and-play, with optional configurations, so that it can be adapted to your project's design pattern.\n\n# Installation\n\nTo install torque-postgresql you need to add the following to your Gemfile:\n```ruby\ngem 'torque-postgresql', '~> 2.0'   # For Rails >= 6.0 < 6.1\ngem 'torque-postgresql', '~> 2.0.4' # For Rails >= 6.1\ngem 'torque-postgresql', '~> 3.0'   # For Rails >= 7.0 < 7.1\ngem 'torque-postgresql', '~> 3.3'   # For Rails >= 7.1 < 7.2\ngem 'torque-postgresql', '~> 3.4'   # For Rails >= 7.2 < 8.0\ngem 'torque-postgresql', '~> 4.0'   # For Rails >= 8.0\n```\n\nAlso, run:\n\n```\n$ bundle\n```\n\nOr, for non-Gemfile related usage, simply:\n\n```\ngem install torque-postgresql\n```\n\n# Usage\nThese are the currently available features:\n\n* [Configuring](https://github.com/crashtech/torque-postgresql/wiki/Configuring)\n\n## Data types\n\n* [Box](https://github.com/crashtech/torque-postgresql/wiki/Box)\n* [Circle](https://github.com/crashtech/torque-postgresql/wiki/Circle)\n* [Date/Time Range](https://github.com/crashtech/torque-postgresql/wiki/Date-Time-Range)\n* [Enum](https://github.com/crashtech/torque-postgresql/wiki/Enum)\n* [EnumSet](https://github.com/crashtech/torque-postgresql/wiki/Enum-Set)\n* [Interval](https://github.com/crashtech/torque-postgresql/wiki/Interval)\n* [Line](https://github.com/crashtech/torque-postgresql/wiki/Line)\n* [Segment](https://github.com/crashtech/torque-postgresql/wiki/Segment)\n\n## Querying\n\n* [Arel](https://github.com/crashtech/torque-postgresql/wiki/Arel)\n* [Auxiliary Statements](https://github.com/crashtech/torque-postgresql/wiki/Auxiliary-Statements)\n* [Belongs to Many](https://github.com/crashtech/torque-postgresql/wiki/Belongs-to-Many)\n* [Distinct On](https://github.com/crashtech/torque-postgresql/wiki/Distinct-On)\n* [Dynamic Attributes](https://github.com/crashtech/torque-postgresql/wiki/Dynamic-Attributes)\n* [Has Many](https://github.com/crashtech/torque-postgresql/wiki/Has-Many)\n* [Inherited Tables](https://github.com/crashtech/torque-postgresql/wiki/Inherited-Tables)\n* [Insert All](https://github.com/crashtech/torque-postgresql/wiki/Insert-All)\n* [Multiple Schemas](https://github.com/crashtech/torque-postgresql/wiki/Multiple-Schemas)\n* [Predicate Builder](https://github.com/crashtech/torque-postgresql/wiki/Predicate-Builder)\n* [Full‐Text Search](https://github.com/crashtech/torque-postgresql/wiki/Full‐Text-Search)\n* [Join Series](https://github.com/crashtech/torque-postgresql/wiki/Join-Series)\n* [Buckets](https://github.com/crashtech/torque-postgresql/wiki/Buckets)\n* [Versioned Commands (Views, Functions, Types)](https://github.com/crashtech/torque-postgresql/wiki/Versioned-Commands)\n\n# How to Contribute\n\nTo start, simply fork the project, create a `.env` file following this example:\n\n```\nDATABASE_URL=\"postgres://USER:PASSWORD@localhost/DATABASE\"\n```\n\nRun local tests using:\n```\n$ bundle install\n$ bundle exec rake spec\n```\nFinally, fix and send a pull request.\n\n## License\n\nCopyright © 2017- Carlos Silva. See [The MIT License](MIT-LICENSE) for further details.\n"
  },
  {
    "path": "README.rdoc",
    "content": "= Torque PostgreSQL -- Add support to complex resources of PostgreSQL, like data\ntypes, user-defined types and auxiliary statements (CTE)\n\nThis is a plugin that enhances Ruby on Rails enabling easy access to existing\nPostgreSQL advanced resources, such as data types and query statements. Its\nfeatures are designed to be similar to Rails architecture and they work as\nsmoothly as possible.\n\n100% plug-and-play, with optional configurations so that it can be adapted to\nyour project's design pattern.\n\nA short rundown of some of the major features:\n\n* Enum type manager\n\nIt creates a separate class to hold each enum set that can be used by multiple\nmodels, it also keeps the database consistent. The enum type is known to have\nbetter performance against string- and integer-like enums.\n{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/static/datatype-enum.html]\n\n   create_enum :roles, %i(visitor manager admin)\n\n   add_column :users, :role, :roles\n\n   Enum::Roles.admin\n\n   Users.roles\n\n  {Learn more}[link:classes/Torque/PostgreSQL/Attributes/Enum.html]\n\n* Enum set type manager\n\nThe enum type is known to have a better performance against string- and integer-\nlike enums. Now with the array option, which behaves like binary assignment,\neach record can have multiple enum values.\n{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/static/datatype-enum.html]\n\n   create_enum :permissions, %i(read write exec)\n\n   add_column :posts, :creator_permissions, :permissions, array: true\n\n   Enum::PermissionsSet.new(3) # [:read, :write]\n\n   post.creator_permissions.write?\n\n  {Learn more}[link:classes/Torque/PostgreSQL/Attributes/EnumSet.html]\n\n* Period complex queries\n\nThis provides extended and complex calculations over date and time ranges. In a\nfew words, you can now store `start_time` and `finish_time` in the same column\nand rely on the methods provided here to do your magic.\n{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/functions-range.html]\n\n   add_column :events, :period, :tsrange\n   add_column :events, :interval, :interval\n\n   Event.create(title: 'Test', period: ['2019-01-01 12:00:00', '2019-01-01 14:00:00'], interval: 15.minutes)\n\n   Event.overlapping('2019-01-01 13:00:00', '2019-01-01 15:00:00').count\n\n   Event.not_real_overlapping('2019-01-01 11:00:00', '2019-01-01 13:00:00').empty?\n\n  {Learn more}[link:classes/Torque/PostgreSQL/Attributes/Builder/Period.html]\n\n* Has many array association\n\nThe idea is simple, one table stores all the ids and the other one says that\n`has many` records on that table because its records ids exist in the column of\nthe array. Like: `Tag has many Videos connected through an array`.\n{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/arrays.html]\n\n   add_column :videos, :tag_ids, :bigint, array: true\n\n   Tag.has_many :videos, array: true\n\n   Tag.videos.size\n\n   Tag.videos << another_video\n\n  {Learn more}[link:classes/Torque/PostgreSQL/Reflection/AbstractReflection.html]\n\n* Belongs to many association\n\nThe original `belongs_to` associations define a `SingularAssociation`, which\nmeans that it could be extended with `array: true`. In this case, I decided to\ncreate my own `CollectionAssociation` called `belongs_to_many`, which behaves\nsimilarly to the single one, but storing and returning a list of records.\n\nWith this, now you can say things like `Project belongs to many employees`,\nwhich is more syntactically correct than `Project has many employees`.\n{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/arrays.html]\n\n   add_column :videos, :tag_ids, :bigint, array: true\n\n   Video.belongs_to_many :tags\n\n   Video.tags.size\n\n   Video.tags << Tag.new(title: 'rails')\n\n  {Learn more}[link:classes/Torque/PostgreSQL/Reflection/BelongsToManyReflection.html]\n\n* Distinct On\n\nMySQL-like group by statement on queries. It keeps only the first row of each\nset of rows where the given expressions evaluate to equal.\n{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/static/sql-select.html#SQL-DISTINCT]\n\n   User.distinct_on(:name).all\n\n  {Learn more}[link:classes/Torque/PostgreSQL/Relation/DistinctOn.html]\n\n* Auxiliary Statements\n\nProvides a way to write auxiliary statements for use in a larger query. It's\nreconfigured on the model, and then can be used during querying process.\n{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/static/queries-with.html]\n\n   class User < ActiveRecord::Base\n     auxiliary_statement :last_comment do |cte|\n       cte.query Comment.distinct_on(:user_id).order(:user_id, id: :desc)\n       cte.attributes content: :last_comment_content\n     end\n   end\n\n   user = User.with(:last_comment).first\n\n  {Learn more}[link:classes/Torque/PostgreSQL/AuxiliaryStatement.html]\n\n* Multiple Schemas\n\nAllows models and modules to have a schema associated with them, so that\ndevelopers can better organize their tables into schemas and build features in\na way that the database can better represent how they are separated.\n\n   create_schema \"internal\", force: :cascade\n\n   module Internal\n     class User < ActiveRecord::Base\n       self.schema = 'internal'\n     end\n   end\n\n   Internal::User.all\n\n  {Learn more}[link:classes/Torque/PostgreSQL/Adapter/DatabaseStatements.html]\n\n== Download and installation\n\nThe latest version of Torque PostgreSQL can be installed with RubyGems:\n\n  $ gem install torque-postgresql\n\nSource code can be downloaded direct from the GitHub repository:\n\n* https://github.com/crashtech/torque-postgresql\n\n\n== License\n\nTorque PostgreSQL is released under the MIT license:\n\n* http://www.opensource.org/licenses/MIT\n"
  },
  {
    "path": "Rakefile",
    "content": "begin\n  require 'bundler/setup'\nrescue LoadError\n  puts 'You must `gem install bundler` and `bundle install` to run rake tasks'\nend\n\nrequire 'rdoc/task'\n\nRDoc::Task.new(:rdoc) do |rdoc|\n  rdoc.rdoc_dir = 'rdoc'\n  rdoc.title    = 'Torque::Postgresql'\n  rdoc.options << '--line-numbers'\n  rdoc.rdoc_files.include('README.rdoc')\n  rdoc.rdoc_files.include('lib/**/*.rb')\nend\n\ndesc 'Initialize the local environment'\ntask :environment do |t|\n  lib  = File.expand_path('../lib', __FILE__)\n  spec = File.expand_path('../spec', __FILE__)\n  $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)\n  $LOAD_PATH.unshift(spec) unless $LOAD_PATH.include?(spec)\nend\n\ndesc 'Prints a schema dump of the test database'\ntask dump: :environment do |t|\n  require 'byebug'\n  require 'spec_helper'\n  ActiveRecord::SchemaDumper.dump\nend\n\nrequire 'rspec/core/rake_task'\nRSpec::Core::RakeTask.new(:spec)\ntask default: :spec\n"
  },
  {
    "path": "gemfiles/Gemfile.rails-8.0",
    "content": "source 'https://rubygems.org'\n\ngem 'rails', '~> 8.0', '< 8.1'\ngem 'pg', '~> 1.4.0'\n\ngemspec path: \"../\"\n"
  },
  {
    "path": "lib/generators/torque/function_generator.rb",
    "content": "# frozen_string_literal: true\n\nrequire 'torque/postgresql/versioned_commands/generator'\n\nmodule Torque\n  module Generators\n    class FunctionGenerator < Rails::Generators::Base\n      include Torque::PostgreSQL::VersionedCommands::Generator\n\n      alias create_function_file create_migration_file\n    end\n  end\nend\n"
  },
  {
    "path": "lib/generators/torque/templates/function.sql.erb",
    "content": "CREATE OR REPLACE FUNCTION <%= name %>()\nRETURNS void AS $$\n  -- Function body goes here\n$$ LANGUAGE sql;\n"
  },
  {
    "path": "lib/generators/torque/templates/type.sql.erb",
    "content": "DROP TYPE IF EXISTS <%= name %>;\nCREATE TYPE <%= name %>;\n"
  },
  {
    "path": "lib/generators/torque/templates/view.sql.erb",
    "content": "<%= \"DROP MATERIALIZED VIEW IF EXISTS #{name};\\n\" if options[:materialized] %>CREATE <%= options[:materialized] ? 'MATERIALIZED' : 'OR REPLACE' %> VIEW <%= name %> AS (\n  -- View body goes here\n);\n"
  },
  {
    "path": "lib/generators/torque/type_generator.rb",
    "content": "# frozen_string_literal: true\n\nrequire 'torque/postgresql/versioned_commands/generator'\n\nmodule Torque\n  module Generators\n    class TypeGenerator < Rails::Generators::Base\n      include Torque::PostgreSQL::VersionedCommands::Generator\n\n      alias create_type_file create_migration_file\n    end\n  end\nend\n"
  },
  {
    "path": "lib/generators/torque/view_generator.rb",
    "content": "# frozen_string_literal: true\n\nrequire 'torque/postgresql/versioned_commands/generator'\n\nmodule Torque\n  module Generators\n    class ViewGenerator < Rails::Generators::Base\n      include Torque::PostgreSQL::VersionedCommands::Generator\n\n      class_option :materialized, type: :boolean, aliases: %i(--m), default: false,\n        desc: 'Use materialized view instead of regular view'\n\n      alias create_view_file create_migration_file\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/database_statements.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Adapter\n      module DatabaseStatements\n\n        EXTENDED_DATABASE_TYPES = %i[enum enum_set interval]\n\n        # Switch between dump mode or not\n        def dump_mode!\n          @_dump_mode = !!!@_dump_mode\n        end\n\n        # List of schemas blocked by the application in the current connection\n        def schemas_blacklist\n          @schemas_blacklist ||= Torque::PostgreSQL.config.schemas.blacklist +\n            (@config.dig(:schemas, 'blacklist') || [])\n        end\n\n        # List of schemas used by the application in the current connection\n        def schemas_whitelist\n          @schemas_whitelist ||= Torque::PostgreSQL.config.schemas.whitelist +\n            (@config.dig(:schemas, 'whitelist') || [])\n        end\n\n        # A list of schemas on the search path sanitized\n        def schemas_search_path_sanitized\n          @schemas_search_path_sanitized ||= begin\n            db_user = @config[:username] || ENV['USER'] || ENV['USERNAME']\n            schema_search_path.split(',').map { |item| item.strip.sub('\"$user\"', db_user) }\n          end\n        end\n\n        # Check if a given type is valid.\n        def valid_type?(type)\n          super || extended_types.include?(type)\n        end\n\n        # Get the list of extended types\n        def extended_types\n          EXTENDED_DATABASE_TYPES\n        end\n\n        # Checks if a given schema exists in the database. If +filtered+ is\n        # given as false, then it will check regardless of whitelist and\n        # blacklist\n        def schema_exists?(name, filtered: true)\n          return user_defined_schemas.include?(name.to_s) if filtered\n\n          query_value(<<-SQL, \"SCHEMA\") == 1\n            SELECT 1 FROM pg_catalog.pg_namespace WHERE nspname = #{quote(name)}\n          SQL\n        end\n\n        # Returns true if type exists.\n        def type_exists?(name)\n          user_defined_types.key? name.to_s\n        end\n        alias data_type_exists? type_exists?\n\n        # Change some of the types being mapped\n        def initialize_type_map(m = type_map)\n          super\n\n          if PostgreSQL.config.geometry.enabled\n            m.register_type 'box',      OID::Box.new\n            m.register_type 'circle',   OID::Circle.new\n            m.register_type 'line',     OID::Line.new\n            m.register_type 'segment',  OID::Segment.new\n          end\n\n          if PostgreSQL.config.interval.enabled\n            m.register_type 'interval', OID::Interval.new\n          end\n        end\n\n        # :nodoc:\n        def load_additional_types(oids = nil)\n          type_map.alias_type 'regclass', 'varchar'\n          type_map.alias_type 'regconfig', 'varchar'\n          super\n          torque_load_additional_types(oids)\n        end\n\n        # Add the composite types to be loaded too.\n        def torque_load_additional_types(oids = nil)\n          return unless torque_load_additional_types?\n\n          # Types: (b)ase, (c)omposite, (d)omain, (e)num, (p)seudotype, (r)ange\n          # (m)ultirange\n\n          query = <<~SQL\n            SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput,\n                   r.rngsubtype, t.typtype, t.typbasetype, t.typarray\n            FROM pg_type as t\n            LEFT JOIN pg_range as r ON oid = rngtypid\n            LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace\n            WHERE n.nspname NOT IN ('pg_catalog', 'information_schema')\n          SQL\n\n          if oids\n            query += \" AND t.oid IN (%s)\" % oids.join(\", \")\n          else\n            query += \" AND t.typtype IN ('e')\"\n          end\n\n          options = { allow_retry: true, materialize_transactions: false }\n          internal_execute(query, 'SCHEMA', **options).each do |row|\n            if row['typtype'] == 'e' && PostgreSQL.config.enum.enabled\n              OID::Enum.create(row, type_map)\n            end\n          end\n        end\n\n        def torque_load_additional_types?\n          PostgreSQL.config.enum.enabled\n        end\n\n        # Gets a list of user defined types.\n        # You can even choose the +category+ filter\n        def user_defined_types(*categories)\n          categories = categories.compact.presence || %w[c e p r m]\n\n          query(<<-SQL, 'SCHEMA').to_h\n            SELECT t.typname, t.typtype\n            FROM pg_type as t\n            LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace\n            WHERE n.nspname NOT IN ('pg_catalog', 'information_schema')\n            AND t.typtype IN ('#{categories.join(\"', '\")}')\n          SQL\n        end\n\n        # Get the list of inherited tables associated with their parent tables\n        def inherited_tables\n          tables = query(<<-SQL, 'SCHEMA')\n            SELECT inhrelid::regclass  AS table_name,\n                   inhparent::regclass AS inheritances\n            FROM pg_inherits\n            JOIN pg_class parent ON pg_inherits.inhparent = parent.oid\n            JOIN pg_class child  ON pg_inherits.inhrelid  = child.oid\n            ORDER BY inhrelid\n          SQL\n\n          tables.each_with_object({}) do |(child, parent), result|\n            (result[child] ||= []) << parent\n          end\n        end\n\n        # Get the list of schemas that were created by the user\n        def user_defined_schemas\n          query_values(user_defined_schemas_sql, 'SCHEMA')\n        end\n\n        # Build the query for allowed schemas\n        def user_defined_schemas_sql\n          <<-SQL.squish\n            SELECT nspname\n            FROM pg_catalog.pg_namespace\n            WHERE 1=1 AND #{filter_by_schema.join(' AND ')}\n            ORDER BY oid\n          SQL\n        end\n\n        # Get the list of columns, and their definition, but only from the\n        # actual table, does not include columns that comes from inherited table\n        def column_definitions(table_name)\n          query(<<~SQL, \"SCHEMA\")\n              SELECT a.attname, format_type(a.atttypid, a.atttypmod),\n                     pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod,\n                     c.collname, col_description(a.attrelid, a.attnum) AS comment,\n                     #{supports_identity_columns? ? 'attidentity' : quote('')} AS identity,\n                     #{supports_virtual_columns? ? 'attgenerated' : quote('')} as attgenerated\n                FROM pg_attribute a\n                LEFT JOIN pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum\n                LEFT JOIN pg_type t ON a.atttypid = t.oid\n                LEFT JOIN pg_collation c ON a.attcollation = c.oid AND a.attcollation <> t.typcollation\n               WHERE a.attrelid = #{quote(quote_table_name(table_name))}::regclass\n                 AND a.attnum > 0 AND NOT a.attisdropped\n                 #{'AND a.attislocal' if @_dump_mode}\n               ORDER BY a.attnum\n          SQL\n        end\n\n        # Get all possible schema entries that can be created via versioned\n        # commands of the provided type. Mostly for covering removals and not\n        # dump them\n        def list_versioned_commands(type)\n          query =\n            case type\n            when :function\n              <<-SQL.squish\n                SELECT n.nspname AS schema, p.proname AS name\n                FROM pg_catalog.pg_proc p\n                INNER JOIN pg_namespace n ON n.oid = p.pronamespace\n                WHERE 1=1 AND #{filter_by_schema.join(' AND ')};\n              SQL\n            when :type\n              <<-SQL.squish\n                SELECT n.nspname AS schema, t.typname AS name\n                FROM pg_type t\n                INNER JOIN pg_namespace n ON n.oid = t.typnamespace\n                WHERE 1=1 AND t.typtype NOT IN ('e')\n                  AND #{filter_by_schema.join(' AND ')};\n              SQL\n            when :view\n              <<-SQL.squish\n                SELECT n.nspname AS schema, c.relname AS name\n                FROM pg_class c\n                INNER JOIN pg_namespace n ON n.oid = c.relnamespace\n                WHERE 1=1 AND c.relkind IN ('v', 'm')\n                  AND #{filter_by_schema.join(' AND ')};\n              SQL\n            end\n\n          select_rows(query, 'SCHEMA')\n        end\n\n        # Build the condition for filtering by schema\n        def filter_by_schema\n          conditions = []\n          conditions << <<-SQL.squish if schemas_blacklist.any?\n            nspname NOT LIKE ALL (ARRAY['#{schemas_blacklist.join(\"', '\")}'])\n          SQL\n\n          conditions << <<-SQL.squish if schemas_whitelist.any?\n            nspname LIKE ANY (ARRAY['#{schemas_whitelist.join(\"', '\")}'])\n          SQL\n          conditions\n        end\n\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/oid/array.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Adapter\n      module OID\n        module Array\n          def force_equality?(value)\n            PostgreSQL.config.predicate_builder.handle_array_attributes ? false : super\n          end\n        end\n\n        ::ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Array.prepend(Array)\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/oid/box.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    class Box < Struct.new(:x1, :y1, :x2, :y2)\n      def points\n        klass = Torque::PostgreSQL.config.geometry.point_class\n        [\n          klass.new(x1, y1),\n          klass.new(x1, y2),\n          klass.new(x2, y1),\n          klass.new(x2, y2),\n        ]\n      end\n    end\n\n    config.geometry.box_class ||= ::ActiveRecord.const_set('Box', Class.new(Box))\n\n    module Adapter\n      module OID\n        class Box < Torque::PostgreSQL::GeometryBuilder\n\n          PIECES = %i[x1 y1 x2 y2].freeze\n          FORMATION = '((%s,%s),(%s,%s))'.freeze\n\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/oid/circle.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    class Circle < Struct.new(:x, :y, :r)\n      alias radius r\n      alias radius= r=\n\n      def center\n        point_class.new(x, y)\n      end\n\n      def center=(value)\n        parts = value.is_a?(point_class) ? [value.x, value.y] : value[0..1]\n        self.x = parts.first\n        self.y = parts.last\n      end\n\n      private\n\n        def point_class\n          Torque::PostgreSQL.config.geometry.point_class\n        end\n    end\n\n    config.geometry.circle_class ||= ::ActiveRecord.const_set('Circle', Class.new(Circle))\n\n    module Adapter\n      module OID\n        class Circle < Torque::PostgreSQL::GeometryBuilder\n\n          PIECES = %i[x y r].freeze\n          FORMATION = '<(%s,%s),%s>'.freeze\n\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/oid/enum.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Adapter\n      module OID\n        class Enum < ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Enum\n\n          attr_reader :name, :klass, :set_klass, :enum_klass\n\n          def self.create(row, type_map)\n            name    = row['typname']\n            oid     = row['oid'].to_i\n            arr_oid = row['typarray'].to_i\n\n            oid_klass     = Enum.new(name)\n            oid_set_klass = EnumSet.new(name, oid_klass.klass)\n            oid_klass.instance_variable_set(:@set_klass, oid_set_klass)\n\n            type_map.register_type(oid,     oid_klass)\n            type_map.register_type(arr_oid, oid_set_klass)\n          end\n\n          def initialize(name)\n            @name  = name\n            @klass = Attributes::Enum.lookup(name)\n\n            @enum_klass = self\n          end\n\n          def hash\n            [self.class, name].hash\n          end\n\n          def serialize(value)\n            return if value.blank?\n            value = cast_value(value)\n            value.to_s unless value.nil?\n          end\n\n          def assert_valid_value(value)\n            cast_value(value)\n          end\n\n          # Always use symbol value for schema dumper\n          def type_cast_for_schema(value)\n            cast_value(value).to_sym.inspect\n          end\n\n          def ==(other)\n            self.class == other.class &&\n              other.klass == klass &&\n              other.type == type\n          end\n\n          private\n\n            def cast_value(value)\n              return if value.blank?\n              return value if value.is_a?(@klass)\n              @klass.new(value)\n            rescue Attributes::Enum::EnumError\n              nil\n            end\n\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/oid/enum_set.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Adapter\n      module OID\n        class EnumSet < Enum\n          def initialize(name, enum_klass)\n            @name  = name + '[]'\n            @klass = Attributes::EnumSet.lookup(name, enum_klass)\n\n            @set_klass = self\n            @enum_klass = enum_klass\n          end\n\n          def type\n            :enum\n          end\n\n          def deserialize(value)\n            return unless value.present?\n            value = value[1..-2].split(',') if value.is_a?(String)\n            cast_value(value)\n          end\n\n          def serialize(value)\n            return if value.blank?\n            value = cast_value(value)\n\n            return if value.blank?\n            \"{#{value.map(&:to_s).join(',')}}\"\n          end\n\n          # Always use symbol values for schema dumper\n          def type_cast_for_schema(value)\n            cast_value(value).map(&:to_sym).inspect\n          end\n\n          private\n\n            def cast_value(value)\n              return if value.blank?\n              return value if value.is_a?(@klass)\n              @klass.new(value)\n            rescue Attributes::EnumSet::EnumSetError\n              nil\n            end\n\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/oid/interval.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Adapter\n      module OID\n        class Interval < ActiveModel::Type::Value\n\n          CAST_PARTS = [:years, :months, :days, :hours, :minutes, :seconds]\n\n          def type\n            :interval\n          end\n\n          # Accepts database-style string, numeric as seconds, array of parts\n          # padded to left, or a hash\n          #\n          # Examples:\n          #   [12, 0, 0]\n          #   produces: 12 hours, 0 minutes, and 0 seconds\n          #\n          #   [nil, nil, 3, 0, 0, 0]\n          #   produces: 3 days, 0 hours, 0 minutes, and 0 seconds\n          #\n          #   {minutes: 12, seconds: 0}\n          #   produces: 12 minutes, and 0 seconds\n          def cast(value)\n            return if value.blank?\n            case value\n            when ::String then deserialize(value)\n            when ::ActiveSupport::Duration then value\n            when ::Numeric\n              parts = CAST_PARTS.map do |part|\n                rest, value = value.divmod(1.send(part))\n                rest == 0 ? nil : [part, rest]\n              end\n              parts_to_duration(parts.compact)\n            when ::Array\n              value.compact!\n              parts = CAST_PARTS.drop(6 - value.size).zip(value).to_h\n              parts_to_duration(parts)\n            when ::Hash\n              parts_to_duration(value)\n            else\n              value\n            end\n          end\n\n          # Uses the ActiveSupport::Duration::ISO8601Parser\n          # See ActiveSupport::Duration#parse\n          # The value must be Integer when no precision is given\n          def deserialize(value)\n            return if value.blank?\n            ActiveSupport::Duration.parse(value)\n          end\n\n          # Uses the ActiveSupport::Duration::ISO8601Serializer\n          # See ActiveSupport::Duration#iso8601\n          def serialize(value)\n            return if value.blank?\n            value = cast(value) unless value.is_a?(ActiveSupport::Duration)\n            value = remove_weeks(value) if value.parts.to_h.key?(:weeks)\n            value.iso8601(precision: @scale)\n          end\n\n          # Always use the numeric value for schema dumper\n          def type_cast_for_schema(value)\n            cast(value).value.inspect\n          end\n\n          # Check if the user input has the correct format\n          def assert_valid_value(value)\n            # TODO: Implement!\n          end\n\n          # Transform a list of parts into a duration object\n          def parts_to_duration(parts)\n            parts = parts.to_h.slice(*CAST_PARTS)\n            return 0.seconds if parts.blank?\n\n            seconds = 0\n            parts = parts.map do |part, num|\n              num = num.to_i unless num.is_a?(Numeric)\n              next if num <= 0\n\n              seconds += num.send(part).value\n              [part.to_sym, num]\n            end\n\n            ActiveSupport::Duration.new(seconds, parts.compact)\n          end\n\n          # As PostgreSQL converts weeks in duration to days, intercept duration\n          # values with weeks and turn them into days before serializing so it\n          # won't break because the following issues\n          # https://github.com/crashtech/torque-postgresql/issues/26\n          # https://github.com/rails/rails/issues/34655\n          def remove_weeks(value)\n            parts = value.parts.dup\n            parts[:days] += parts.delete(:weeks) * 7\n            ActiveSupport::Duration.new(value.seconds.to_i, parts)\n          end\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/oid/line.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    class Line < Struct.new(:slope, :intercept)\n      alias c intercept\n      alias c= intercept=\n\n      def a=(value)\n        self.slope = vertical? ? Float::INFINITY : Rational(value, b)\n      end\n\n      def a\n        slope.numerator\n      end\n\n      def b=(value)\n        self.slope = value.zero? ? Float::INFINITY : Rational(a, value)\n      end\n\n      def b\n        vertical? ? 0 : slope.denominator\n      end\n\n      def horizontal?\n        slope.zero?\n      end\n\n      def vertical?\n        !slope.try(:infinite?).eql?(nil)\n      end\n    end\n\n    config.geometry.line_class ||= ::ActiveRecord.const_set('Line', Class.new(Line))\n\n    module Adapter\n      module OID\n        class Line < Torque::PostgreSQL::GeometryBuilder\n\n          PIECES = %i[a b c].freeze\n          FORMATION = '{%s,%s,%s}'.freeze\n\n          protected\n\n            def build_klass(*args)\n              return nil if args.empty?\n              check_invalid_format!(args)\n\n              a, b, c = args.try(:first, pieces.size)&.map(&:to_f)\n              slope = b.zero? ? Float::INFINITY : Rational(a, b)\n              config_class.new(slope, c)\n            end\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/oid/range.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Adapter\n      module OID\n        class Range < ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Range\n          HASH_PICK = %i[from start end to].freeze\n\n          module Comparison\n            def <=>(other)\n              return super unless other.acts_like?(:date) || other.acts_like?(:time)\n              other = other.to_time if other.acts_like?(:date)\n              super other.to_i\n            end\n          end\n\n          def cast_value(value)\n            case value\n            when ::Array\n              cast_custom(value[0], value[1])\n            when ::Hash\n              pieces = value.with_indifferent_access.values_at(*HASH_PICK)\n              cast_custom(pieces[0] || pieces[1], pieces[2] || pieces[3])\n            else\n              super\n            end\n          end\n\n          def map(value) # :nodoc:\n            return value unless value.respond_to?(:first)\n            from = yield(value.first)\n            to = yield(value.last)\n            cast_custom(from, to)\n          end\n\n          private\n\n            def cast_custom(from, to)\n              from = custom_cast_single(from, true)\n              to = custom_cast_single(to)\n              ::Range.new(from, to)\n            end\n\n            def custom_cast_single(value, negative = false)\n              value.blank? ? custom_infinity(negative) : subtype.deserialize(value)\n            end\n\n            def custom_infinity(negative)\n              negative ? -::Float::INFINITY : ::Float::INFINITY\n            end\n        end\n\n        ::ActiveRecord::ConnectionAdapters::PostgreSQL::OID.send(:remove_const, :Range)\n        ::ActiveRecord::ConnectionAdapters::PostgreSQL::OID.const_set(:Range, Range)\n\n        ::Float.prepend(Range::Comparison)\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/oid/segment.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    class Segment < Struct.new(:point0, :point1)\n      def x1=(value)\n        self.point0 = new_point(value, y1)\n      end\n\n      def x1\n        point0.x\n      end\n\n      def y1=(value)\n        self.point0 = new_point(x1, value)\n      end\n\n      def y1\n        point0.y\n      end\n\n      def x2=(value)\n        self.point1 = new_point(value, y2)\n      end\n\n      def x2\n        point1.x\n      end\n\n      def y2=(value)\n        self.point1 = new_point(x2, value)\n      end\n\n      def y2\n        point1.y\n      end\n\n      private\n\n        def new_point(x, y)\n          Torque::PostgreSQL.config.geometry.point_class.new(x, y)\n        end\n    end\n\n    config.geometry.segment_class ||= ::ActiveRecord.const_set('Segment', Class.new(Segment))\n\n    module Adapter\n      module OID\n        class Segment < Torque::PostgreSQL::GeometryBuilder\n\n          PIECES = %i[x1 y1 x2 y2].freeze\n          FORMATION = '((%s,%s),(%s,%s))'.freeze\n\n          protected\n\n            def point_class\n              Torque::PostgreSQL.config.geometry.point_class\n            end\n\n            def build_klass(*args)\n              return nil if args.empty?\n              check_invalid_format!(args)\n\n              x1, y1, x2, y2 = args.try(:first, pieces.size)&.map(&:to_f)\n              config_class.new(\n                point_class.new(x1, y1),\n                point_class.new(x2, y2),\n              )\n            end\n\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/oid.rb",
    "content": "require_relative 'oid/array'\nrequire_relative 'oid/range'\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/quoting.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Adapter\n      module Quoting\n        QUOTED_TYPE_NAMES = Concurrent::Map.new\n\n        Name = ActiveRecord::ConnectionAdapters::PostgreSQL::Name\n        Column = ActiveRecord::ConnectionAdapters::PostgreSQL::Column\n        ColumnDefinition = ActiveRecord::ConnectionAdapters::ColumnDefinition\n        Utils = ActiveRecord::ConnectionAdapters::PostgreSQL::Utils\n\n        # Quotes type names for use in SQL queries.\n        def quote_type_name(name, *args)\n          QUOTED_TYPE_NAMES[args] ||= begin\n            name = name.to_s\n            args << 'public' if args.empty? && !name.include?('.')\n            quote_identifier_name(name, *args)\n          end\n        end\n\n        # Make sure to support all sorts of different compositions of names\n        def quote_identifier_name(name, schema = nil)\n          name = Utils.extract_schema_qualified_name(name.to_s) unless name.is_a?(Name)\n          name.instance_variable_set(:@schema, Utils.unquote_identifier(schema.to_s)) if schema\n          name.quoted.freeze\n        end\n\n        def quote_default_expression(value, column)\n          return super unless value.class <= Array || value.class <= Set\n\n          type =\n            if column.is_a?(ColumnDefinition) && column.options.try(:[], :array)\n              # This is the general way\n              lookup_cast_type(column.sql_type)\n            elsif column.is_a?(Column) && column.array?\n              # When using +change_column_default+\n              lookup_cast_type_from_column(column)\n            end\n\n          type.nil? ? super : quote(type.serialize(value.to_a))\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/schema_creation.rb",
    "content": "module Torque\n  module PostgreSQL\n    module Adapter\n      module SchemaCreation\n\n        # Inherits are now setup via table options, but keep the implementation\n        # supported by this gem\n        def add_table_options!(create_sql, o)\n          if o.inherits.present?\n            # Make sure we always have parenthesis\n            create_sql << '()' unless create_sql[-1] == ')'\n\n            tables = o.inherits.map(&method(:quote_table_name))\n            create_sql << \" INHERITS ( #{tables.join(' , ')} )\"\n          end\n\n          super(create_sql, o)\n        end\n      end\n\n      ActiveRecord::ConnectionAdapters::PostgreSQL::SchemaCreation.prepend SchemaCreation\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/schema_definitions.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Adapter\n      module ColumnMethods\n\n        # Adds a search language column to the table. See +add_search_language+\n        def search_language(*names, **options)\n          raise ArgumentError, \"Missing column name(s) for search_language\" if names.empty?\n          names.each { |name| column(name, :regconfig, **options) }\n        end\n\n        # Add a search vector column to the table. See +add_search_vector+\n        def search_vector(*names, columns:, **options)\n          raise ArgumentError, \"Missing column name(s) for search_vector\" if names.empty?\n          options = Attributes::Builder.search_vector_options(columns: columns, **options)\n          names.each { |name| column(name, :virtual, **options) }\n        end\n\n      end\n\n      module TableDefinition\n        include ColumnMethods\n\n        attr_reader :inherits\n\n        def initialize(*args, **options)\n          super\n\n          @inherits = Array.wrap(options.delete(:inherits)).flatten.compact \\\n            if options.key?(:inherits)\n        end\n\n        def set_primary_key(tn, id, primary_key, *, **)\n          super unless @inherits.present? && primary_key.blank? && id == :primary_key\n        end\n\n        private\n\n          def create_column_definition(name, type, options)\n            if type == :enum_set\n              type = :enum\n              options ||= {}\n              options[:array] = true\n            end\n\n            super(name, type, options)\n          end\n      end\n\n      # Add exclusive support for versioned commands when importing from schema\n      # dump. This ensures that such methods are not available in regular\n      # migrations.\n      module Definition\n\n        def create_function(name, version:, dir: pool.migrations_paths)\n          return super unless VersionedCommands.valid_type?(:function)\n          execute VersionedCommands.fetch_command(dir, :function, name, version)\n        end\n\n        def create_type(name, version:, dir: pool.migrations_paths)\n          return super unless VersionedCommands.valid_type?(:type)\n          execute VersionedCommands.fetch_command(dir, :type, name, version)\n        end\n\n        def create_view(name, version:, dir: pool.migrations_paths)\n          return super unless VersionedCommands.valid_type?(:view)\n          execute VersionedCommands.fetch_command(dir, :view, name, version)\n        end\n\n      end\n\n      ActiveRecord::ConnectionAdapters::PostgreSQL::Table.include ColumnMethods\n      ActiveRecord::ConnectionAdapters::PostgreSQL::TableDefinition.include TableDefinition\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/schema_dumper.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Adapter\n      module SchemaDumper\n        SEARCH_VECTOR_SCANNER = /\n          to_tsvector\\(\n            ('[^']+'|[a-z][a-z0-9_]*)[^,]*,[^\\(]*\n            \\(?coalesce\\(([a-z][a-z0-9_]*)[^\\)]*\\)\\)?\n          (?:::[^\\)]*\\))?\n          (?:\\s*,\\s*'([A-D])')?\n        /ix\n\n        def initialize(*)\n          super\n\n          if with_versioned_commands?\n            @versioned_commands = VersionedCommands::SchemaTable.new(@connection.pool)\n            @ignore_tables << @versioned_commands.table_name\n          end\n        end\n\n        def dump(stream) # :nodoc:\n          @connection.dump_mode!\n          super\n\n          @connection.dump_mode!\n          stream\n        end\n\n        private\n\n          def types(stream) # :nodoc:\n            super\n\n            versioned_commands(stream, :type)\n            versioned_commands(stream, :function)\n          end\n\n          def tables(stream) # :nodoc:\n            around_tables(stream) { dump_tables(stream) }\n          end\n\n          def around_tables(stream)\n            functions(stream) if fx_functions_position == :beginning\n\n            yield\n            versioned_commands(stream, :view, true)\n\n            functions(stream) if fx_functions_position == :end\n            triggers(stream) if defined?(::Fx::SchemaDumper::Trigger)\n          end\n\n          def dump_tables(stream)\n            inherited_tables = @connection.inherited_tables\n            sorted_tables = (@connection.tables - @connection.views).filter_map do |table_name|\n              name_parts = table_name.split(/(?:public)?\\./).reverse.compact_blank\n              next if ignored?(table_name) || ignored?(name_parts.join('.'))\n\n              [table_name, name_parts]\n            end.sort_by(&:last).to_h\n\n            postponed = []\n\n            stream.puts \"  # These are the common tables\"\n            sorted_tables.each do |table, (table_name, _)|\n              next postponed << table if inherited_tables.key?(table_name)\n\n              table(table, stream)\n              stream.puts # Ideally we would not do this in the last one\n            end\n\n            if postponed.present?\n              stream.puts \"  # These are tables that have inheritance\"\n              postponed.each do |table|\n                sub_stream = StringIO.new\n                table(table, sub_stream)\n                stream.puts sub_stream.string.sub(/do \\|t\\|\\n  end/, '')\n                stream.puts\n              end\n            end\n\n            # Fixes double new lines to single new lines\n            stream.pos -= 1\n\n            # dump foreign keys at the end to make sure all dependent tables exist.\n            if @connection.supports_foreign_keys?\n              foreign_keys_stream = StringIO.new\n              sorted_tables.each do |(tbl, *)|\n                foreign_keys(tbl, foreign_keys_stream)\n              end\n\n              foreign_keys_string = foreign_keys_stream.string\n              stream.puts if foreign_keys_string.length > 0\n              stream.print foreign_keys_string\n            end\n          end\n\n          # Make sure to remove the schema from the table name\n          def remove_prefix_and_suffix(table)\n            super(table.sub(/\\A[a-z0-9_]*\\./, ''))\n          end\n\n          # Dump user defined schemas\n          def schemas(stream)\n            return super if !PostgreSQL.config.schemas.enabled\n            return if (list = (@connection.user_defined_schemas - ['public'])).empty?\n\n            stream.puts \"  # Custom schemas defined in this database.\"\n            list.each { |name| stream.puts \"  create_schema \\\"#{name}\\\", force: :cascade\" }\n            stream.puts\n          end\n\n          # Adjust the schema type for search vector\n          def schema_type_with_virtual(column)\n            column.virtual? && column.type == :tsvector ? :search_vector : super\n          end\n\n          # Adjust the schema type for search language\n          def schema_type(column)\n            column.sql_type == 'regconfig' ? :search_language : super\n          end\n\n          # Adjust table options to make the dump more readable\n          def prepare_column_options(column)\n            options = super\n            parse_search_vector_options(column, options) if column.type == :tsvector\n            options\n          end\n\n          # Parse the search vector operation into a readable format\n          def parse_search_vector_options(column, options)\n            settings = options[:as]&.scan(SEARCH_VECTOR_SCANNER)\n            return if settings.blank?\n\n            languages = settings.map(&:shift).uniq\n            return if languages.many?\n\n            language = languages.first\n            language = language[0] == \"'\" ? language[1..-2] : language.to_sym\n            columns = parse_search_vector_columns(settings)\n\n            options.except!(:as, :type)\n            options.merge!(language: language.inspect, columns: columns)\n          end\n\n          # Simplify the whole columns configuration to make it more manageable\n          def parse_search_vector_columns(settings)\n            return \":#{settings.first.first}\" if settings.one?\n\n            settings = settings.sort_by(&:last)\n            weights = %w[A B C D]\n\n            columns = settings.each.with_index.reduce([]) do |acc, (setting, index)|\n              column, weight = setting\n              break if (weights[index] || 'D') != weight\n\n              acc << column\n              acc\n            end\n\n            return columns.map(&:to_sym).inspect if columns\n            settings.to_h.transform_values(&:inspect)\n          end\n\n          # Simply add all versioned commands to the stream\n          def versioned_commands(stream, type, add_newline = false)\n            return unless with_versioned_commands?\n\n            list = @versioned_commands.versions_of(type.to_s)\n            return if list.empty?\n\n            existing = list_existing_versioned_commands(type)\n\n            stream.puts if add_newline\n            stream.puts \"  # These are #{type.to_s.pluralize} managed by versioned commands\"\n            list.each do |(name, version)|\n              next if existing.exclude?(name)\n\n              stream.puts \"  create_#{type} \\\"#{name}\\\", version: #{version}\"\n            end\n            stream.puts unless add_newline\n          end\n\n          def list_existing_versioned_commands(type)\n            @connection.list_versioned_commands(type).each_with_object(Set.new) do |entry, set|\n              set << (entry.first == 'public' ? entry.last : entry.join('_'))\n            end\n          end\n\n          def with_versioned_commands?\n            PostgreSQL.config.versioned_commands.enabled\n          end\n\n          def fx_functions_position\n            return unless defined?(::Fx::SchemaDumper::Function)\n            Fx.configuration.dump_functions_at_beginning_of_schema ? :beginning : :end\n          end\n      end\n\n      ActiveRecord::ConnectionAdapters::PostgreSQL::SchemaDumper.prepend SchemaDumper\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/schema_overrides.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Adapter\n      module SchemaOverrides\n        # This adds better support for handling the quotation of table names\n        def quote_table_name(name)\n          ActiveRecord::ConnectionAdapters::PostgreSQL::Quoting::QUOTED_TABLE_NAMES.then do |m|\n            m[name] ||= quote_identifier_name(name)\n          end\n        end\n\n        %i[\n          table_exists? indexes index_exists? columns column_exists? primary_key\n          create_table change_table add_column add_columns remove_columns remove_column\n          change_column change_column_default change_column_null rename_column\n          add_index remove_index rename_index index_name_exists? foreign_keys\n          add_timestamps remove_timestamps change_table_comment change_column_comment\n          bulk_change_table\n\n          rename_table add_foreign_key remove_foreign_key foreign_key_exists?\n        ].each do |method_name|\n          define_method(method_name) do |table_name, *args, **options, &block|\n            table_name = sanitize_name_with_schema(table_name, options)\n            super(table_name, *args, **options, &block)\n          end\n        end\n\n        def drop_table(*table_names, **options)\n          table_names = table_names.map { |name| sanitize_name_with_schema(name, options.dup) }\n          super(*table_names, **options)\n        end\n\n        private\n\n          def validate_table_length!(table_name)\n            super(table_name.to_s)\n          end\n      end\n\n      include SchemaOverrides\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter/schema_statements.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Adapter\n      module SchemaStatements\n        # Drops a type\n        def drop_type(name, options = {})\n          force = options.fetch(:force, '').upcase\n          check = 'IF EXISTS' if options.fetch(:check, true)\n          name = sanitize_name_with_schema(name, options)\n\n          internal_exec_query(<<-SQL.squish).tap { reload_type_map }\n            DROP TYPE #{check}\n            #{quote_type_name(name)} #{force}\n          SQL\n        end\n\n        # Renames a type\n        def rename_type(type_name, new_name, options = {})\n          type_name = sanitize_name_with_schema(type_name, options)\n          internal_exec_query(<<-SQL.squish).tap { reload_type_map }\n            ALTER TYPE #{quote_type_name(type_name)}\n            RENAME TO #{Quoting::Name.new(nil, new_name.to_s).quoted}\n          SQL\n        end\n\n        # Creates a column that stores the underlying language of the record so\n        # that a search vector can be created dynamically based on it. It uses\n        # a `regconfig` type, so string conversions are mandatory\n        def add_search_language(table, name, options = {})\n          add_column(table, name, :regconfig, options)\n        end\n\n        # Creates a column and setup a search vector as a virtual column. The\n        # options are dev-friendly and controls how the vector function will be\n        # defined\n        #\n        # === Options\n        # [:columns]\n        #   The list of columns that will be used to create the search vector.\n        #   It can be a single column, an array of columns, or a hash as a\n        #   combination of column name and weight (A, B, C, or D).\n        # [:language]\n        #   Specify the language config to be used for the search vector. If a\n        #   string is provided, then the value will be statically embedded. If a\n        #   symbol is provided, then it will reference another column.\n        # [:stored]\n        #   Specify if the value should be stored in the database. As of now,\n        #   PostgreSQL only supports `true`, which will create a stored column.\n        def add_search_vector(table, name, columns, options = {})\n          options = Builder.search_vector_options(columns: columns, **options)\n          add_column(table, name, options.delete(:type), options)\n        end\n\n        # Changes the enumerator by adding new values\n        #\n        # Example:\n        #   add_enum_values 'status', ['baz']\n        #   add_enum_values 'status', ['baz'], before: 'bar'\n        #   add_enum_values 'status', ['baz'], after: 'foo'\n        #   add_enum_values 'status', ['baz'], prepend: true\n        def add_enum_values(name, values, options = {})\n          name   = sanitize_name_with_schema(name, options)\n          before = options.fetch(:before, false)\n          after  = options.fetch(:after,  false)\n\n          before = enum_values(name).first if options.key? :prepend\n          before = quote(before) unless before == false\n          after  = quote(after)  unless after == false\n\n          quote_enum_values(name, values, options).each do |value|\n            reference = \"BEFORE #{before}\" unless before == false\n            reference = \"AFTER  #{after}\"  unless after == false\n            execute <<-SQL.squish\n              ALTER TYPE #{quote_type_name(name)}\n              ADD VALUE #{value} #{reference}\n            SQL\n\n            before = false\n            after  = value\n          end\n        end\n\n        # Returns all values that an enum type can have.\n        def enum_values(name)\n          select_values(<<-SQL.squish, 'SCHEMA')\n            SELECT enumlabel FROM pg_enum\n            WHERE enumtypid = #{quote(name)}::regtype::oid\n            ORDER BY enumsortorder\n          SQL\n        end\n\n\n        # Add the schema option when extracting table options\n        def table_options(table_name)\n          options = super\n\n          if PostgreSQL.config.schemas.enabled\n            table, schema = table_name.split('.').reverse\n            if table.present? && schema.present? && schema != current_schema\n              options[:schema] = schema\n            end\n          end\n\n          if options[:options]&.start_with?('INHERITS (')\n            options.delete(:options)\n\n            tables = inherited_table_names(table_name)\n            options[:inherits] = tables.one? ? tables.first : tables\n          end\n\n          options\n        end\n\n        # When dumping the schema we need to add all schemas, not only those\n        # active for the current +schema_search_path+\n        def quoted_scope(name = nil, type: nil)\n          return super unless name.nil?\n\n          scope = super\n          global = scope[:schema].start_with?('ANY (')\n          scope[:schema] = \"ANY ('{#{user_defined_schemas.join(',')}}')\"\n          scope\n        end\n\n        # Fix the query to include the schema on tables names when dumping\n        def data_source_sql(name = nil, type: nil)\n          return super unless name.nil?\n\n          super.sub('SELECT c.relname FROM', \"SELECT n.nspname || '.' || c.relname FROM\")\n        end\n\n        # Add schema and inherits as one of the valid options for table\n        # definition\n        def valid_table_definition_options\n          super + [:schema, :inherits]\n        end\n\n        # Add proper support for schema load when using versioned commands\n        def assume_migrated_upto_version(version)\n          return super unless PostgreSQL.config.versioned_commands.enabled\n          return super if (commands = pool.migration_context.migration_commands).empty?\n\n          version = version.to_i\n          migration_context = pool.migration_context\n          migrated = migration_context.get_all_versions\n          versions = migration_context.migrations.map(&:version)\n\n          inserting = (versions - migrated).select { |v| v < version }\n          inserting << version unless migrated.include?(version)\n          return if inserting.empty?\n\n          duplicated = inserting.tally.filter_map { |v, count| v if count > 1 }\n          raise <<~MSG.squish if duplicated.present?\n            Duplicate migration #{duplicated.first}.\n            Please renumber your migrations to resolve the conflict.\n          MSG\n\n          VersionedCommands::SchemaTable.new(pool).create_table\n          execute insert_versions_sql(inserting)\n        end\n\n        # Add proper support for schema load when using versioned commands\n        def insert_versions_sql(versions)\n          return super unless PostgreSQL.config.versioned_commands.enabled\n\n          commands = pool.migration_context.migration_commands.select do |migration|\n            versions.include?(migration.version)\n          end\n\n          return super if commands.empty?\n\n          table = quote_table_name(VersionedCommands::SchemaTable.new(pool).table_name)\n\n          sql = super(versions - commands.map(&:version))\n          sql << \"\\nINSERT INTO #{table} (version, type, object_name) VALUES\\n\"\n          sql << commands.map do |m|\n            +\"(#{quote(m.version)}, #{quote(m.type)}, #{quote(m.object_name)})\"\n          end.join(\",\\n\")\n          sql << \";\"\n          sql\n        end\n\n        private\n\n          # Remove the schema from the sequence name\n          def sequence_name_from_parts(table_name, column_name, suffix)\n            super(table_name.split('.').last, column_name, suffix)\n          end\n\n          # Helper for supporting schema name in several methods\n          def sanitize_name_with_schema(name, options)\n            return name if (schema = options&.delete(:schema)).blank?\n            Quoting::Name.new(schema.to_s, name.to_s)\n          end\n\n          def quote_enum_values(name, values, options)\n            prefix = options[:prefix]\n            prefix = name if prefix === true\n\n            suffix = options[:suffix]\n            suffix = name if suffix === true\n\n            values.map! do |value|\n              quote([prefix, value, suffix].compact.join('_'))\n            end\n          end\n\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/adapter.rb",
    "content": "# frozen_string_literal: true\n\nrequire_relative 'adapter/database_statements'\nrequire_relative 'adapter/oid'\nrequire_relative 'adapter/quoting'\nrequire_relative 'adapter/schema_creation'\nrequire_relative 'adapter/schema_definitions'\nrequire_relative 'adapter/schema_dumper'\nrequire_relative 'adapter/schema_statements'\n\nmodule Torque\n  module PostgreSQL\n    module Adapter\n      include Quoting\n      include DatabaseStatements\n      include SchemaStatements\n\n      # :nodoc:\n      class DeduplicatableArray < ::Array\n        def deduplicate\n          map { |value| -value }\n        end\n\n        alias :-@ :deduplicate\n      end\n\n      # Get the current PostgreSQL version as a Gem Version.\n      def version\n        @version ||= Gem::Version.new(\n          select_value('SELECT version()').match(/#{Adapter::ADAPTER_NAME} ([\\d\\.]+)/)[1]\n        )\n      end\n\n      # Add `inherits` and `schema` to the list of extracted table options\n      def extract_table_options!(options)\n        super.merge(options.extract!(:inherits, :schema))\n      end\n\n      # Allow filtered bulk insert by adding the where clause. This method is\n      # only used by +InsertAll+, so it somewhat safe to override it\n      def build_insert_sql(insert)\n        super.tap do |sql|\n          if insert.update_duplicates? && insert.where_condition?\n            if insert.returning\n              sql.sub!(' RETURNING ', \" WHERE #{insert.where} RETURNING \")\n            else\n              sql << \" WHERE #{insert.where}\"\n            end\n          end\n        end\n      end\n    end\n\n    ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.prepend Adapter\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/arel/infix_operation.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Arel\n      Math = Module.new\n\n      def self.build_operations(operations)\n        default_alias = :visit_Arel_Nodes_InfixOperation\n\n        operations&.each do |name, operator|\n          klass_name = name.to_s.camelize\n          next if ::Arel::Nodes.const_defined?(klass_name)\n\n          klass = Class.new(::Arel::Nodes::InfixOperation)\n          operator = (-operator).to_sym\n          klass.send(:define_method, :initialize) { |*args| super(operator, *args) }\n\n          ::Arel::Nodes.const_set(klass_name, klass)\n          visitor = :\"visit_Arel_Nodes_#{klass_name}\"\n          ::Arel::Visitors::PostgreSQL.send(:alias_method, visitor, default_alias)\n\n          # Don't worry about quoting here, if the right side is something that\n          # doesn't need quoting, it will leave it as it is\n          Math.send(:define_method, klass_name.underscore) { |other| klass.new(self, other) }\n        end\n      end\n\n      ::Arel::Nodes::Node.include(Math)\n      ::Arel::Attribute.include(Math)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/arel/join_source.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Arel\n      module JoinSource\n        attr_accessor :only\n\n        def only?\n          only === true\n        end\n      end\n\n      ::Arel::Nodes::JoinSource.include JoinSource\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/arel/nodes.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Arel\n      module Nodes\n\n        class Cast < ::Arel::Nodes::Binary\n          include ::Arel::Expressions\n          include ::Arel::Predications\n          include ::Arel::AliasPredication\n          include ::Arel::OrderPredications\n          include ::Arel::Math\n\n          def initialize(left, right, array = false)\n            right = +right.to_s\n            right << '[]' if array\n            super left, right\n          end\n        end\n\n        class Ref < ::Arel::Nodes::Unary\n          attr_reader :reference\n          alias to_s expr\n\n          def initialize(expr, reference = nil)\n            @reference = reference\n            super expr\n          end\n\n          def as(other)\n            @reference&.as(other) || super\n          end\n        end\n\n      end\n\n      ::Arel.define_singleton_method(:array) do |*values, cast: nil|\n        values = values.first if values.size.eql?(1) && values.first.is_a?(::Enumerable)\n        result = ::Arel::Nodes.build_quoted(values)\n        result = result.pg_cast(cast, true) if cast.present?\n        result\n      end\n\n      ::Arel::Nodes::Function.include(::Arel::Math)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/arel/operations.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Arel\n      module Operations\n\n        # Create a cast operation\n        def pg_cast(type, array = false)\n          Nodes::Cast.new(self, type, array)\n        end\n\n        # Make sure to add proper support over AR's own +cast+ method while\n        # still allow attributes to be casted\n        def cast(type, array = false)\n          defined?(super) && !array ? super(type) : pg_cast(type, array)\n        end\n\n      end\n\n      ::Arel::Attributes::Attribute.include(Operations)\n      ::Arel::Nodes::SqlLiteral.include(Operations)\n      ::Arel::Nodes::Node.include(Operations)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/arel/select_manager.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Arel\n      module SelectManager\n\n        def only\n          @ctx.source.only = true\n        end\n\n      end\n\n      ::Arel::SelectManager.include SelectManager\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/arel/visitors.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Arel\n      module Visitors\n        # Add ONLY modifier to query\n        def visit_Arel_Nodes_JoinSource(o, collector)\n          collector << 'ONLY ' if o.only?\n          super\n        end\n\n        # Allow quoted arrays to get here\n        def visit_Arel_Nodes_Quoted(o, collector)\n          return super unless o.expr.is_a?(::Enumerable)\n          quote_array(o.expr, collector)\n        end\n\n        # Allow quoted arrays to get here\n        def visit_Arel_Nodes_Casted(o, collector)\n          value = o.value_for_database\n          klass = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Array::Data\n          return super unless value.is_a?(klass)\n          quote_array(value.values, collector)\n        end\n\n        ## TORQUE VISITORS\n        def visit_Torque_PostgreSQL_Arel_Nodes_Ref(o, collector)\n          collector << quote_table_name(o.expr)\n        end\n\n        # Allow casting any node\n        def visit_Torque_PostgreSQL_Arel_Nodes_Cast(o, collector)\n          visit(o.left, collector) << '::' << o.right\n        end\n\n        private\n\n          def quote_array(value, collector)\n            value = value.map(&::Arel::Nodes.method(:build_quoted))\n\n            collector << 'ARRAY['\n            visit_Array(value, collector)\n            collector << ']'\n          end\n      end\n\n      ::Arel::Visitors::PostgreSQL.prepend(Visitors)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/arel.rb",
    "content": "require_relative 'arel/infix_operation'\nrequire_relative 'arel/join_source'\nrequire_relative 'arel/nodes'\nrequire_relative 'arel/operations'\nrequire_relative 'arel/select_manager'\nrequire_relative 'arel/visitors'\n"
  },
  {
    "path": "lib/torque/postgresql/associations/association_scope.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Associations\n      module AssociationScope\n        # A customized predicate builder for array attributes that can be used\n        # standalone and changes the behavior of the blank state\n        class PredicateBuilderArray\n          include PredicateBuilder::ArrayHandler\n\n          def call_with_empty(attribute)\n            '1=0' # Does not match records with empty arrays\n          end\n        end\n\n        module ClassMethods\n          def get_bind_values(*)\n            super.flatten\n          end\n        end\n\n        private\n\n          # When loading a join by value (last as in we know which records to\n          # load) only has many array need to have a different behavior, so it\n          # can properly match array values\n          def last_chain_scope(scope, reflection, owner)\n            return super unless reflection.connected_through_array?\n            return super if reflection.macro == :belongs_to_many\n\n            constraint = PredicateBuilderArray.new.call_for_array(\n              reflection.array_attribute,\n              transform_value(owner[reflection.join_foreign_key]),\n            )\n\n            scope.where!(constraint)\n          end\n\n          # When loading a join by reference (next as in we don't know which\n          # records to load), it can take advantage of the new predicate builder\n          # to figure out the most optimal way to connect both properties\n          def next_chain_scope(scope, reflection, next_reflection)\n            return super unless reflection.connected_through_array?\n\n            primary_key = reflection.aliased_table[reflection.join_primary_key]\n            foreign_key = next_reflection.aliased_table[reflection.join_foreign_key]\n            constraint = PredicateBuilder::ArelAttributeHandler.call(primary_key, foreign_key)\n\n            scope.joins!(join(foreign_table, constraint))\n   
       end\n\n          # For array-like values, it needs to call the method as many times as\n          # the array size\n          def transform_value(value)\n            if value.is_a?(::Enumerable)\n              value.map { |v| value_transformation.call(v) }\n            else\n              value_transformation.call(value)\n            end\n          end\n      end\n\n      ::ActiveRecord::Associations::AssociationScope.singleton_class.prepend(AssociationScope::ClassMethods)\n      ::ActiveRecord::Associations::AssociationScope.prepend(AssociationScope)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/associations/belongs_to_many_association.rb",
    "content": "# frozen_string_literal: true\n\nrequire 'active_record/associations/collection_association'\n\n# FIXME: build, create\nmodule Torque\n  module PostgreSQL\n    module Associations\n      class BelongsToManyAssociation < ::ActiveRecord::Associations::CollectionAssociation\n        include ::ActiveRecord::Associations::ForeignAssociation\n\n        ## CUSTOM\n        def ids_reader\n          if loaded?\n            target.pluck(reflection.active_record_primary_key)\n          elsif !target.empty?\n            load_target.pluck(reflection.active_record_primary_key)\n          else\n            stale_state || column_default_value\n          end\n        end\n\n        def ids_writer(ids)\n          ids = ids.presence || column_default_value\n          owner.write_attribute(source_attr, ids)\n          return unless owner.persisted? && owner.attribute_changed?(source_attr)\n\n          owner.update_attribute(source_attr, ids)\n        end\n\n        def size\n          if loaded?\n            target.size\n          elsif !target.empty?\n            unsaved_records = target.select(&:new_record?)\n            unsaved_records.size + stale_state.size\n          else\n            stale_state&.size || 0\n          end\n        end\n\n        def empty?\n          size.zero?\n        end\n\n        def include?(record)\n          return false unless record.is_a?(reflection.klass)\n          return include_in_memory?(record) if record.new_record?\n\n          (!target.empty? && target.include?(record)) ||\n            stale_state&.include?(record.read_attribute(klass_attr))\n        end\n\n        def load_target\n          if stale_target? 
|| find_target?\n            persisted_records = (find_target || []) + target.extract!(&:persisted?)\n            @target = merge_target_lists(persisted_records, target)\n          end\n\n          loaded!\n          target\n        end\n\n        def build_changes(from_target = false)\n          return yield if defined?(@_building_changes) && @_building_changes\n\n          @_building_changes = true\n          yield.tap { ids_writer(from_target ? ids_reader : stale_state) }\n        ensure\n          @_building_changes = nil\n        end\n\n        def trigger(prefix, before_ids, after_ids)\n          removed_ids = before_ids - after_ids\n          added_ids = after_ids - before_ids\n\n          if removed_ids.any?\n            callbacks_for(method = :\"#{prefix}_remove\").each do |callback|\n              target_scope.find(removed_ids).each do |record|\n                callback.call(method, owner, record)\n              end\n            end\n          end\n\n          if added_ids.any?\n            callbacks_for(method = :\"#{prefix}_add\").each do |callback|\n              target_scope.find(added_ids).each do |record|\n                callback.call(method, owner, record)\n              end\n            end\n          end\n        end\n\n        ## HAS MANY\n        def handle_dependency\n          case options[:dependent]\n          when :restrict_with_exception\n            raise ActiveRecord::DeleteRestrictionError.new(reflection.name) unless empty?\n\n          when :restrict_with_error\n            unless empty?\n              record = owner.class.human_attribute_name(reflection.name).downcase\n              owner.errors.add(:base, :'restrict_dependent_destroy.has_many', record: record)\n              throw(:abort)\n            end\n\n          when :destroy\n            load_target.each { |t| t.destroyed_by_association = reflection }\n            destroy_all\n          when :destroy_async\n            load_target.each do |t|\n              
t.destroyed_by_association = reflection\n            end\n\n            unless target.empty?\n              association_class = target.first.class\n              primary_key_column = association_class.primary_key.to_sym\n\n              ids = target.collect do |assoc|\n                assoc.public_send(primary_key_column)\n              end\n\n              enqueue_destroy_association(\n                owner_model_name: owner.class.to_s,\n                owner_id: owner.id,\n                association_class: association_class.to_s,\n                association_ids: ids,\n                association_primary_key_column: primary_key_column,\n                ensuring_owner_was_method: options.fetch(:ensuring_owner_was, nil)\n              )\n            end\n          else\n            delete_all\n          end\n        end\n\n        def insert_record(record, *)\n          (record.persisted? || super).tap do |saved|\n            ids_rewriter(record.read_attribute(klass_attr), :<<) if saved\n          end\n        end\n\n        ## BELONGS TO\n        def default(&block)\n          writer(owner.instance_exec(&block)) if reader.nil?\n        end\n\n        private\n\n          ## CUSTOM\n          def _create_record(attributes, raises = false, &block)\n            if attributes.is_a?(Array)\n              attributes.collect { |attr| _create_record(attr, raises, &block) }\n            else\n              build_record(attributes, &block).tap do |record|\n                transaction do\n                  result = nil\n                  add_to_target(record) do\n                    result = insert_record(record, true, raises) { @_was_loaded = loaded? 
}\n                  end\n                  raise ActiveRecord::Rollback unless result\n                end\n              end\n            end\n          end\n\n          # When the idea is to nullify the association, then just set the owner\n          # +primary_key+ as empty\n          def delete_count(method, scope, ids)\n            size_cache = scope.delete_all if method == :delete_all\n            (size_cache || ids.size).tap { ids_rewriter(ids, :-) }\n          end\n\n          def delete_or_nullify_all_records(method)\n            delete_count(method, scope, ids_reader)\n          end\n\n          # Deletes the records according to the <tt>:dependent</tt> option.\n          def delete_records(records, method)\n            ids = read_records_ids(records)\n\n            if method == :destroy\n              records.each(&:destroy!)\n              ids_rewriter(ids, :-)\n            else\n              scope = self.scope.where(klass_attr => records)\n              delete_count(method, scope, ids)\n            end\n          end\n\n          def source_attr\n            reflection.foreign_key\n          end\n\n          def klass_attr\n            reflection.active_record_primary_key\n          end\n\n          def read_records_ids(records)\n            return unless records.present?\n            Array.wrap(records).each_with_object(klass_attr).map(&:read_attribute).presence\n          end\n\n          def ids_rewriter(ids, operator)\n            list = owner[source_attr] ||= []\n            list = list.public_send(operator, ids)\n            owner[source_attr] = list.uniq.compact.presence || column_default_value\n\n            return if @_building_changes || !owner.persisted?\n            owner.update_attribute(source_attr, list)\n          end\n\n          def column_default_value\n            owner.class.columns_hash[source_attr].default\n          end\n\n          def callback(*)\n            true # This is handled/trigger when the owner record actually 
changes\n          end\n\n          ## HAS MANY\n          def replace_records(*)\n            build_changes(true) { super }\n          end\n\n          def concat_records(*)\n            build_changes(true) { super }\n          end\n\n          def delete_or_destroy(*)\n            build_changes(true) { super }\n          end\n\n          def difference(a, b)\n            a - b\n          end\n\n          def intersection(a, b)\n            a & b\n          end\n\n          ## BELONGS TO\n          def scope_for_create\n            super.except!(klass.primary_key)\n          end\n\n          def find_target?\n            !loaded? && foreign_key_present? && klass\n          end\n\n          def foreign_key_present?\n            stale_state.present?\n          end\n\n          def invertible_for?(record)\n            return unless (inverse = inverse_reflection_for(record))\n            collection_class = ::ActiveRecord::Associations::HasManyAssociation\n            inverse.is_a?(collection_class) && inverse.connected_through_array?\n          end\n\n          def stale_state\n            owner.read_attribute(source_attr)\n          end\n      end\n\n      ::ActiveRecord::Associations.const_set(:BelongsToManyAssociation, BelongsToManyAssociation)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/associations/builder/belongs_to_many.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Associations\n      module Builder\n        class BelongsToMany < ::ActiveRecord::Associations::Builder::CollectionAssociation\n          def self.macro\n            :belongs_to_many\n          end\n\n          def self.valid_options(options)\n            super + [:touch, :optional, :default, :dependent, :primary_key, :required]\n          end\n\n          def self.valid_dependent_options\n            [:restrict_with_error, :restrict_with_exception]\n          end\n\n          def self.define_callbacks(model, reflection)\n            super\n            add_touch_callbacks(model, reflection)   if reflection.options[:touch]\n            add_default_callbacks(model, reflection) if reflection.options[:default]\n            add_change_callbacks(model, reflection)\n          end\n\n          def self.define_readers(mixin, name)\n            mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1\n              def #{name}\n                association(:#{name}).reader\n              end\n            CODE\n          end\n\n          def self.define_writers(mixin, name)\n            mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1\n              def #{name}=(value)\n                association(:#{name}).writer(value)\n              end\n            CODE\n          end\n\n          def self.add_default_callbacks(model, reflection)\n            model.before_validation ->(o) do\n              o.association(reflection.name).default(&reflection.options[:default])\n            end\n          end\n\n          def self.add_touch_callbacks(model, reflection)\n            foreign_key = reflection.foreign_key\n            n           = reflection.name\n            touch       = reflection.options[:touch]\n\n            callback = ->(changes_method) do\n              ->(record) do\n                BelongsToMany.touch_record(record, record.send(changes_method), foreign_key,\n                  n, 
touch, belongs_to_touch_method)\n              end\n            end\n\n            model.after_create callback.call(:saved_changes), if: :saved_changes?\n            model.after_update callback.call(:saved_changes), if: :saved_changes?\n            model.after_destroy callback.call(:changes_to_save)\n            model.after_touch callback.call(:changes_to_save)\n          end\n\n          def self.touch_record(o, changes, foreign_key, name, touch, touch_method) # :nodoc:\n            old_foreign_ids = changes[foreign_key] && changes[foreign_key].first\n\n            if old_foreign_ids.present?\n              association = o.association(name)\n              reflection = association.reflection\n              klass = association.klass\n\n              primary_key = reflection.association_primary_key(klass)\n              old_records = klass.find_by(primary_key => old_foreign_ids)\n\n              old_records&.map do |old_record|\n                if touch != true\n                  old_record.send(touch_method, touch)\n                else\n                  old_record.send(touch_method)\n                end\n              end\n            end\n\n            o.send(name)&.map do |record|\n              if record && record.persisted?\n                if touch != true\n                  record.send(touch_method, touch)\n                else\n                  record.send(touch_method)\n                end\n              end\n            end\n          end\n\n          def self.add_change_callbacks(model, reflection)\n            foreign_key = reflection.foreign_key\n            name = reflection.name\n\n            model.before_save ->(record) do\n              before, after = record.changes[foreign_key]\n              record.association(name).trigger(:before, before, after) if before && after\n            end\n\n            model.after_save ->(record) do\n              before, after = record.previous_changes[foreign_key]\n              
record.association(name).trigger(:after, before, after) if before && after\n            end\n          end\n\n          def self.add_destroy_callbacks(model, reflection)\n            model.after_destroy lambda { |o| o.association(reflection.name).handle_dependency }\n          end\n\n          def self.define_validations(model, reflection)\n            if reflection.options.key?(:required)\n              reflection.options[:optional] = !reflection.options.delete(:required)\n            end\n\n            if reflection.options[:optional].nil?\n              required = model.belongs_to_many_required_by_default\n            else\n              required = !reflection.options[:optional]\n            end\n\n            super\n\n            if required\n              model.validates_presence_of reflection.name, message: :required\n            end\n          end\n        end\n\n        ::ActiveRecord::Associations::Builder.const_set(:BelongsToMany, BelongsToMany)\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/associations/builder/has_many.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Associations\n      module Builder\n        module HasMany\n          def valid_options(options)\n            super + [:array]\n          end\n        end\n\n        ::ActiveRecord::Associations::Builder::HasMany.extend(HasMany)\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/associations/builder.rb",
    "content": "require_relative 'builder/belongs_to_many'\nrequire_relative 'builder/has_many'\n"
  },
  {
    "path": "lib/torque/postgresql/associations/foreign_association.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Associations\n      module ForeignAssociation\n\n        # There is no problem of adding temporary items on target because\n        # CollectionProxy will handle memory and persisted relationship\n        def inversed_from(record)\n          return super unless reflection.connected_through_array?\n\n          self.target ||= []\n          self.target.push(record) unless self.target.include?(record)\n          @inversed = self.target.present?\n        end\n\n        # The binds and the cache are getting mixed and caching the wrong query\n        def skip_statement_cache?(*)\n          super || reflection.connected_through_array?\n        end\n\n        private\n\n          # This is mainly for the has many when connect through an array to add\n          # its id to the list of the inverse belongs to many association\n          def set_owner_attributes(record)\n            return super unless reflection.connected_through_array?\n\n            add_id = owner[reflection.active_record_primary_key]\n            list = record[reflection.foreign_key] ||= []\n            list.push(add_id) unless list.include?(add_id)\n          end\n\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/associations/preloader/association.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Associations\n      module Preloader\n        module Association\n\n          delegate :connected_through_array?, to: :@reflection\n\n          # For reflections connected through an array, make sure to properly\n          # decuple the list of ids and set them as associated with the owner\n          def run\n            return self if run?\n            return super unless connected_through_array?\n\n            @run = true\n            send(\"run_array_for_#{@reflection.macro}\")\n            self\n          end\n\n          # Correctly correlate records when they are connected theough an array\n          def set_inverse(record)\n            return super unless connected_through_array? && @reflection.macro == :has_many\n\n            # Only the first owner is associated following the same instruction\n            # on the original implementation\n            convert_key(record[association_key_name])&.each do |key|\n              if owners = owners_by_key[key]\n                association = owners.first.association(reflection.name)\n                association.set_inverse_instance(record)\n              end\n            end\n          end\n\n          # Requires a slight change when running on has many since the value\n          # of the foreign key being an array\n          def load_records(raw_records = nil)\n            return super unless connected_through_array? && @reflection.macro == :has_many\n\n            @records_by_owner = {}.compare_by_identity\n            raw_records ||= loader_query.records_for([self])\n\n            @preloaded_records = raw_records.select do |record|\n              assignments = false\n\n              keys = convert_key(record[association_key_name]) || []\n              owners_by_key.values_at(*keys).each do |owner|\n                entries = (@records_by_owner[owner] ||= [])\n\n                if reflection.collection? 
|| entries.empty?\n                  entries << record\n                  assignments = true\n                end\n              end\n\n              assignments\n            end\n          end\n\n          # Make sure to change the process when connected through an array\n          def owners_by_key\n            return super unless connected_through_array?\n            @owners_by_key ||= owners.each_with_object({}) do |owner, result|\n              Array.wrap(convert_key(owner[owner_key_name])).each do |key|\n                (result[key] ||= []) << owner\n              end\n            end\n          end\n\n          private\n\n            # Specific run for belongs_many association\n            def run_array_for_belongs_to_many\n              # Add reverse to has_many\n              records = groupped_records\n              owners.each do |owner|\n                items = records.values_at(*Array.wrap(owner[owner_key_name]))\n                associate_records_to_owner(owner, items.flatten)\n              end\n            end\n\n            # Specific run for has_many association\n            def run_array_for_has_many\n              # Add reverse to belongs_to_many\n              records = Hash.new { |h, k| h[k] = [] }\n              groupped_records.each do |ids, record|\n                ids.each { |id| records[id].concat(Array.wrap(record)) }\n              end\n\n              records.default_proc = nil\n              owners.each do |owner|\n                associate_records_to_owner(owner, records[owner[owner_key_name]] || [])\n              end\n            end\n\n            # Build correctly the constraint condition in order to get the\n            # associated ids\n            def records_for(ids, &block)\n              return super unless connected_through_array?\n              condition = scope.arel_table[association_key_name]\n              condition = reflection.build_id_constraint(condition, ids.flatten.uniq)\n              
scope.where(condition).load(&block)\n            end\n\n            def associate_records_to_owner(owner, records)\n              return super unless connected_through_array?\n              association = owner.association(reflection.name)\n              association.loaded!\n              association.target.concat(records)\n            end\n\n            def groupped_records\n              preloaded_records.group_by do |record|\n                convert_key(record[association_key_name])\n              end\n            end\n        end\n\n        ::ActiveRecord::Associations::Preloader::Association.prepend(Association)\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/associations/preloader/loader_query.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Associations\n      module Preloader\n        module LoaderQuery\n          def foreign_column\n            @foreign_column ||= scope.columns_hash[association_key_name.to_s]\n          end\n\n          def load_records_for_keys(keys, &block)\n            condition = query_condition_for(keys)\n            return super if condition.nil?\n\n            scope.where(condition).load(&block)\n          end\n\n          def query_condition_for(keys)\n            return unless connected_through_array?\n\n            value = scope.cast_for_condition(foreign_column, keys.to_a)\n            scope.table[association_key_name].overlaps(value)\n          end\n\n          def connected_through_array?\n            !association_key_name.is_a?(Array) && foreign_column&.array?\n          end\n        end\n\n        ::ActiveRecord::Associations::Preloader::Association::LoaderQuery\n          .prepend(LoaderQuery)\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/associations/preloader.rb",
    "content": "require_relative 'preloader/association'\nrequire_relative 'preloader/loader_query'\n"
  },
  {
    "path": "lib/torque/postgresql/associations.rb",
    "content": "require_relative 'associations/association_scope'\nrequire_relative 'associations/belongs_to_many_association'\nrequire_relative 'associations/foreign_association'\n\nrequire_relative 'associations/builder'\nrequire_relative 'associations/preloader'\n\nassociation_mod = Torque::PostgreSQL::Associations::ForeignAssociation\n::ActiveRecord::Associations::HasManyAssociation.prepend(association_mod)\n::ActiveRecord::Associations::BelongsToManyAssociation.prepend(association_mod)\n"
  },
  {
    "path": "lib/torque/postgresql/attributes/builder/enum.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Attributes\n      module Builder\n        class Enum\n          VALID_TYPES = %i[enum enum_set].freeze\n          FN = '::Torque::PostgreSQL::FN'\n\n          attr_accessor :klass, :attribute, :subtype, :options, :values,\n            :klass_module, :instance_module\n\n          # Start a new builder of methods for enum values on ActiveRecord::Base\n          def initialize(klass, attribute, options)\n            @klass     = klass\n            @attribute = attribute.to_s\n            @subtype   = klass.attribute_types[@attribute]\n            @options   = options\n\n            raise Interrupt unless subtype.respond_to?(:klass)\n            @values    = subtype.klass.values\n\n            if @options[:only]\n              @values &= Array(@options[:only]).map(&:to_s)\n            end\n\n            if @options[:except]\n              @values -= Array(@options[:except]).map(&:to_s)\n            end\n          end\n\n          # Get the list of methods based on enum values\n          def values_methods\n            return @values_methods if defined?(@values_methods)\n\n            prefix = options.fetch(:prefix, nil)\n            suffix = options.fetch(:suffix, nil)\n\n            prefix = attribute if prefix == true\n            suffix = attribute if suffix == true\n\n            base   = [prefix, '%s', suffix].compact.join('_')\n\n            @values_methods = begin\n              values.map do |val|\n                key   = val.downcase.tr('- ', '__')\n                scope = base % key\n                ask   = scope + '?'\n                bang  = scope + '!'\n                [key, [scope, ask, bang, val]]\n              end.to_h\n            end\n          end\n\n          # Check if it's building the methods for sets\n          def set_features?\n            options[:set_features].present?\n          end\n\n          # Check if any of the methods that will be created 
get in conflict\n          # with the base class methods\n          def conflicting?\n            return if options[:force] == true\n            attributes = attribute.pluralize\n\n            dangerous?(attributes, true)\n            dangerous?(\"#{attributes}_keys\", true)\n            dangerous?(\"#{attributes}_texts\", true)\n            dangerous?(\"#{attributes}_options\", true)\n            dangerous?(\"#{attribute}_text\")\n\n            if set_features?\n              dangerous?(\"has_#{attributes}\", true)\n              dangerous?(\"has_any_#{attributes}\", true)\n            end\n\n            values_methods.each do |attr, (scope, ask, bang, *)|\n              dangerous?(scope, true)\n              dangerous?(bang)\n              dangerous?(ask)\n            end\n          rescue Interrupt => err\n            raise ArgumentError, <<-MSG.squish\n              Enum #{subtype.name} was not able to generate requested\n              methods because the method #{err} already exists in\n              #{klass.name}.\n            MSG\n          end\n\n          # Create all methods needed\n          def build\n            @klass_module = Module.new\n            @instance_module = Module.new\n\n            plural\n            stringify\n            all_values\n            set_scopes if set_features?\n\n            klass.extend klass_module\n            klass.include instance_module\n          end\n\n          private\n\n            # Check if the method already exists in the reference class\n            def dangerous?(method_name, class_method = false)\n              if class_method\n                if klass.dangerous_class_method?(method_name)\n                  raise Interrupt, method_name.to_s\n                end\n              else\n                if klass.dangerous_attribute_method?(method_name)\n                  raise Interrupt, method_name.to_s\n                end\n              end\n            rescue Interrupt => e\n              raise e if 
Torque::PostgreSQL.config.enum.raise_conflicting\n              type = class_method ? 'class method' : 'instance method'\n              indicator = class_method ? '.' : '#'\n\n              Torque::PostgreSQL.logger.info(<<~MSG.squish)\n                Creating #{type} :#{method_name} for enum.\n                Overwriting existing method #{klass.name}#{indicator}#{method_name}.\n              MSG\n            end\n\n            # Create the method that allows access to the list of values\n            def plural\n              enum_klass = subtype.klass.name\n              klass_module.module_eval <<-RUBY, __FILE__, __LINE__ + 1\n                def #{attribute.pluralize}                                  # def roles\n                  ::#{enum_klass}.values                                    #   Enum::Roles.values\n                end                                                         # end\n\n                def #{attribute.pluralize}_keys                             # def roles_keys\n                  ::#{enum_klass}.keys                                      #   Enum::Roles.keys\n                end                                                         # end\n\n                def #{attribute.pluralize}_texts                            # def roles_texts\n                  ::#{enum_klass}.members.map do |member|                   #   Enum::Roles.members do |member|\n                    member.text('#{attribute}', self)                       #     member.text('role', self)\n                  end                                                       #   end\n                end                                                         # end\n\n                def #{attribute.pluralize}_options                          # def roles_options\n                  #{attribute.pluralize}_texts.zip(::#{enum_klass}.values)  #   roles_texts.zip(Enum::Roles.values)\n                end                                                         # end\n              
RUBY\n            end\n\n            # Create additional methods when the enum is a set, which needs\n            # better ways to check if values are present or not\n            def set_scopes\n              cast_type = subtype.name.chomp('[]')\n              klass_module.module_eval <<-RUBY, __FILE__, __LINE__ + 1\n                def has_#{attribute.pluralize}(*values)                     # def has_roles(*values)\n                  attr = arel_table['#{attribute}']                         #   attr = arel_table['role']\n                  value = #{FN}.bind_with(attr, values)                     #   value = ::Torque::PostgreSQL::FN.bind_with(attr, values)\n                  where(attr.contains(value.pg_cast('#{cast_type}[]')))     #   where(attr.contains(value.pg_cast('roles[]')))\n                end                                                         # end\n\n                def has_any_#{attribute.pluralize}(*values)                 # def has_any_roles(*values)\n                  attr = arel_table['#{attribute}']                         #   attr = arel_table['role']\n                  value = #{FN}.bind_with(attr, values)                     #   value = ::Torque::PostgreSQL::FN.bind_with(attr, values)\n                  where(attr.overlaps(value.pg_cast('#{cast_type}[]')))     #   where(attr.overlaps(value.pg_cast('roles[]')))\n                end                                                         # end\n              RUBY\n            end\n\n            # Create the method that turn the attribute value into text using\n            # the model scope\n            def stringify\n              instance_module.module_eval <<-RUBY, __FILE__, __LINE__ + 1\n                def #{attribute}_text                      # def role_text\n                  #{attribute}.text('#{attribute}', self)  #   role.text('role', self)\n                end                                        # end\n              RUBY\n            end\n\n            # Create all the methods 
that represent actions related to the\n            # attribute value\n            def all_values\n              klass_content = ''\n              instance_content = ''\n              enum_klass = subtype.klass.name\n\n              values_methods.each do |key, (scope, ask, bang, val)|\n                klass_content += <<-RUBY\n                  def #{scope}                                    # def admin\n                    attr = arel_table['#{attribute}']             #   attr = arel_table['role']\n                    where(::#{enum_klass}.scope(attr, '#{val}'))  #   where(Enum::Roles.scope(attr, 'admin'))\n                  end                                             # end\n                RUBY\n\n                instance_content += <<-RUBY\n                  def #{ask}                                      # def admin?\n                    #{attribute}.#{key}?                          #   role.admin?\n                  end                                             # end\n\n                  def #{bang}                                     # admin!\n                    self.#{attribute} = '#{val}'                  #   self.role = 'admin'\n                    return unless #{attribute}_changed?           #   return unless role_changed?\n                    return save! if Torque::PostgreSQL.config.enum.save_on_bang\n                    true                                          #   true\n                  end                                             # end\n                RUBY\n              end\n\n              klass_module.module_eval(klass_content)\n              instance_module.module_eval(instance_content)\n            end\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/attributes/builder/full_text_search.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Attributes\n      module Builder\n        class FullTextSearch\n          attr_accessor :klass, :attribute, :options, :klass_module,\n            :default_rank, :default_mode, :default_order, :default_language\n\n          def initialize(klass, attribute, options = {})\n            @klass = klass\n            @attribute = attribute\n            @options = options\n\n            @default_rank = options[:with_rank] == true ? 'rank' : options[:with_rank]&.to_s\n            @default_mode = options[:mode] || PostgreSQL.config.full_text_search.default_mode\n\n            @default_order =\n              case options[:order]\n              when :asc, true then :asc\n              when :desc then :desc\n              else false\n              end\n\n            @default_language = options[:language] if options[:language].is_a?(String) ||\n              options[:language].is_a?(Symbol)\n            @default_language ||= PostgreSQL.config.full_text_search.default_language.to_s\n          end\n\n          # What is the name of the scope to be added to the model\n          def scope_name\n            @scope_name ||= [\n              options[:prefix],\n              :full_text_search,\n              options[:suffix],\n            ].compact.join('_')\n          end\n\n          # Just check if the scope name is already defined\n          def conflicting?\n            return if options[:force] == true\n\n            if klass.dangerous_class_method?(scope_name)\n              raise Interrupt, scope_name.to_s\n            end\n          end\n\n          # Create the proper scope\n          def build\n            @klass_module = Module.new\n            add_scope_to_module\n            klass.extend klass_module\n          end\n\n          # Creates a class method as the scope that builds the full text search\n          def add_scope_to_module\n            klass_module.module_eval <<-RUBY, 
__FILE__, __LINE__ + 1\n              def #{scope_name}(value#{scope_args})\n                attr = arel_table['#{attribute}']\n                fn = ::Torque::PostgreSQL::FN\n\n                lang = language.to_s if !language.is_a?(::Symbol)\n                lang ||= arel_table[language.to_s] if has_attribute?(language)\n                lang ||= public_send(language) if respond_to?(language)\n\n                function = {\n                  default: :to_tsquery,\n                  phrase: :phraseto_tsquery,\n                  plain: :plainto_tsquery,\n                  web: :websearch_to_tsquery,\n                }[mode.to_sym]\n\n                raise ::ArgumentError, <<~MSG.squish if lang.blank?\n                  Unable to determine language from \\#{language.inspect}.\n                MSG\n\n                raise ::ArgumentError, <<~MSG.squish if function.nil?\n                  Invalid mode \\#{mode.inspect} for full text search.\n                MSG\n\n                value = fn.bind(:value, value.to_s, attr.type_caster)\n                lang = fn.bind(:lang, lang, attr.type_caster) if lang.is_a?(::String)\n\n                query = fn.public_send(function, lang, value)\n                ranker = fn.ts_rank(attr, query) if rank || order\n\n                result = where(fn.infix(:\"@@\", attr, query))\n                result = result.order(ranker.public_send(order == :desc ? :desc : :asc)) if order\n                result.select_extra_values += [ranker.as(rank == true ? 
'rank' : rank.to_s)] if rank\n                result\n              end\n            RUBY\n          end\n\n          # Returns the arguments to be used on the scope\n          def scope_args\n            args = +''\n            args << \", order: #{default_order.inspect}\"\n            args << \", rank: #{default_rank.inspect}\"\n            args << \", language: #{default_language.inspect}\"\n            args << \", mode: :#{default_mode}\"\n            args\n          end\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/attributes/builder/period.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Attributes\n      module Builder\n        class Period\n          DIRECT_ACCESS_REGEX = /_?%s_?/\n          SUPPORTED_TYPES = %i[daterange tsrange tstzrange].freeze\n          CURRENT_GETTERS = {\n            daterange: 'Date.current',\n            tsrange:   'Time.zone.now',\n            tstzrange: 'Time.zone.now',\n          }.freeze\n\n          TYPE_CASTERS = {\n            daterange: :date,\n            tsrange:   :timestamp,\n            tstzrange: :timestamp,\n          }.freeze\n\n          FN = '::Torque::PostgreSQL::FN'\n\n          attr_accessor :klass, :attribute, :options, :type, :default, :current_getter,\n            :type_caster, :threshold, :dynamic_threshold, :klass_module, :instance_module\n\n          # Start a new builder of methods for period values on\n          # ActiveRecord::Base\n          def initialize(klass, attribute, options)\n            @klass     = klass\n            @attribute = attribute.to_s\n            @options   = options\n            @type      = klass.attribute_types[@attribute].type\n\n            raise ArgumentError, <<-MSG.squish unless SUPPORTED_TYPES.include?(type)\n              Period cannot be generated for #{attribute} because its type\n              #{type} is not supported. Only #{SUPPORTED_TYPES.join(', ')} are supported.\n            MSG\n\n            @current_getter = CURRENT_GETTERS[type]\n            @type_caster    = TYPE_CASTERS[type]\n\n            @default        = options[:pessimistic].blank?\n          end\n\n          # Check if can identify a threshold field\n          def threshold\n            @threshold ||= begin\n              option = options[:threshold]\n              return if option.eql?(false)\n\n              unless option.eql?(true)\n                return option.is_a?(String) ? 
option.to_sym : option\n              end\n\n              attributes = klass.attribute_names\n              default_name = Torque::PostgreSQL.config.period.auto_threshold.to_s\n              raise ArgumentError, <<-MSG.squish unless attributes.include?(default_name)\n                Unable to find the #{default_name} to use as threshold for period\n                features for #{attribute} in #{klass.name} model.\n              MSG\n\n              check_type = klass.attribute_types[default_name].type\n              raise ArgumentError, <<-MSG.squish unless check_type.eql?(:interval)\n                The #{default_name} has the wrong type to be used as threshold.\n                Expected :interval got #{check_type.inspect} in #{klass.name} model.\n              MSG\n\n              default_name.to_sym\n            end\n          end\n\n          # Generate all the method names\n          def method_names\n            @method_names ||= default_method_names.merge(options.fetch(:methods, {}))\n          end\n\n          # Get the list of methods associated with the class\n          def klass_method_names\n            @klass_method_names ||= method_names.to_a[0..22].to_h\n          end\n\n          # Get the list of methods associated with the instances\n          def instance_method_names\n            @instance_method_names ||= method_names.to_a[23..29].to_h\n          end\n\n          # Check if any of the methods that will be created get in conflict\n          # with the base class methods\n          def conflicting?\n            return if options[:force] == true\n\n            klass_method_names.values.each { |name| dangerous?(name, true) }\n            instance_method_names.values.each { |name| dangerous?(name) }\n          rescue Interrupt => err\n            raise ArgumentError, <<-MSG.squish\n              #{self.class.name} was not able to generate requested\n              methods because the method #{err} already exists in\n              
#{klass.name}.\n            MSG\n          end\n\n          # Create all methods needed\n          def build\n            @klass_module = Module.new\n            @instance_module = Module.new\n\n            value_args      = ['value']\n            left_right_args = ['left', 'right = nil']\n\n            ## Klass methods\n            build_method_helper :klass, :current_on,                 value_args            # 00\n            build_method_helper :klass, :current                                           # 01\n            build_method_helper :klass, :not_current                                       # 02\n            build_method_helper :klass, :containing,                 value_args            # 03\n            build_method_helper :klass, :not_containing,             value_args            # 04\n            build_method_helper :klass, :overlapping,                left_right_args       # 05\n            build_method_helper :klass, :not_overlapping,            left_right_args       # 06\n            build_method_helper :klass, :starting_after,             value_args            # 07\n            build_method_helper :klass, :starting_before,            value_args            # 08\n            build_method_helper :klass, :finishing_after,            value_args            # 09\n            build_method_helper :klass, :finishing_before,           value_args            # 10\n\n            if threshold.present?\n              build_method_helper :klass, :real_containing,          value_args            # 11\n              build_method_helper :klass, :real_overlapping,         left_right_args       # 12\n              build_method_helper :klass, :real_starting_after,      value_args            # 13\n              build_method_helper :klass, :real_starting_before,     value_args            # 14\n              build_method_helper :klass, :real_finishing_after,     value_args            # 15\n              build_method_helper :klass, :real_finishing_before,    value_args         
   # 16\n            end\n\n            unless type.eql?(:daterange)\n              build_method_helper :klass, :containing_date,          value_args            # 17\n              build_method_helper :klass, :not_containing_date,      value_args            # 18\n              build_method_helper :klass, :overlapping_date,         left_right_args       # 19\n              build_method_helper :klass, :not_overlapping_date,     left_right_args       # 20\n\n              if threshold.present?\n                build_method_helper :klass, :real_containing_date,   value_args            # 21\n                build_method_helper :klass, :real_overlapping_date,  left_right_args       # 22\n              end\n            end\n\n            ## Instance methods\n            build_method_helper :instance, :current?                                       # 23\n            build_method_helper :instance, :current_on?,             value_args            # 24\n            build_method_helper :instance, :start                                          # 25\n            build_method_helper :instance, :finish                                         # 26\n\n            if threshold.present?\n              build_method_helper :instance, :real                                         # 27\n              build_method_helper :instance, :real_start                                   # 28\n              build_method_helper :instance, :real_finish                                  # 29\n            end\n\n            klass.extend klass_module\n            klass.include instance_module\n          end\n\n          def build_method_helper(type, key, args = [])\n            method_name = method_names[key]\n            return if method_name.nil?\n\n            method_content = send(\"#{type}_#{key}\")\n            method_content = define_string_method(method_name, method_content, args)\n\n            source_module = send(\"#{type}_module\")\n            source_module.module_eval(method_content)\n        
  end\n\n          private\n\n            # Generates the default method names\n            def default_method_names\n              list = Torque::PostgreSQL.config.period.method_names.dup\n\n              if options.fetch(:prefixed, true)\n                list.transform_values { |value| format(value, attribute) }\n              else\n                list = list.merge(Torque::PostgreSQL.config.period.direct_method_names)\n                list.transform_values { |value| value.gsub(DIRECT_ACCESS_REGEX, '') }\n              end\n            end\n\n            # Check if the method already exists in the reference class\n            def dangerous?(method_name, class_method = false)\n              if class_method\n                if klass.dangerous_class_method?(method_name)\n                  raise Interrupt, method_name.to_s\n                end\n              else\n                if klass.dangerous_attribute_method?(method_name)\n                  raise Interrupt, method_name.to_s\n                end\n              end\n            end\n\n            ## BUILDER HELPERS\n            def define_string_method(name, body, args = [])\n              headline = \"def #{name}\"\n              headline += \"(#{args.join(', ')})\"\n              [headline, body, 'end'].join(\"\\n\")\n            end\n\n            def arel_attribute\n              @arel_attribute ||= \"arel_table[#{attribute.inspect}]\"\n            end\n\n            def arel_default_sql\n              @arel_default_sql ||= arel_sql_bind(@default.inspect)\n            end\n\n            def arel_sql_bind(value)\n              \"#{FN}.bind_with(#{arel_attribute}, #{value})\"\n            end\n\n            # Check how to provide the threshold value\n            def arel_threshold_value\n              @arel_threshold_value ||= begin\n                case threshold\n                when Symbol, String\n                  \"arel_table['#{threshold}']\"\n                when ActiveSupport::Duration\n               
   value = \"'#{threshold.to_i} seconds'\"\n                  \"::Arel.sql(\\\"#{value}\\\").pg_cast(:interval)\"\n                when Numeric\n                  value = threshold.to_i.to_s\n                  value << (type_caster.eql?(:date) ? ' days' : ' seconds')\n                  value = \"'#{value}'\"\n                  \"::Arel.sql(\\\"#{value}\\\").pg_cast(:interval)\"\n                end\n              end\n            end\n\n            # Start at version of the value\n            def arel_start_at\n              @arel_start_at ||= arel_named_function('lower', arel_attribute)\n            end\n\n            # Finish at version of the value\n            def arel_finish_at\n              @arel_finish_at ||= arel_named_function('upper', arel_attribute)\n            end\n\n            # Start at version of the value with threshold\n            def arel_real_start_at\n              return arel_start_at unless threshold.present?\n              @arel_real_start_at ||= begin\n                result = +\"(#{arel_start_at} - #{arel_threshold_value})\"\n                result << '.pg_cast(:date)' if type.eql?(:daterange)\n                result\n              end\n            end\n\n            # Finish at version of the value with threshold\n            def arel_real_finish_at\n              return arel_finish_at unless threshold.present?\n              @arel_real_finish_at ||= begin\n                result = +\"(#{arel_finish_at} + #{arel_threshold_value})\"\n                result << '.pg_cast(:date)' if type.eql?(:daterange)\n                result\n              end\n            end\n\n            # When the time has a threshold, then the real attribute is complex\n            def arel_real_attribute\n              return arel_attribute unless threshold.present?\n              @arel_real_attribute ||= arel_named_function(\n                type, arel_real_start_at, arel_real_finish_at,\n              )\n            end\n\n            # Create an arel version of 
the type with the following values\n            def arel_convert_to_type(left, right = nil, set_type = nil)\n              arel_named_function(set_type || type, left, right || left)\n            end\n\n            # Create an arel named function\n            def arel_named_function(name, *args)\n              result = +\"#{FN}.#{name}\"\n              result << '(' << args.join(', ') << ')' if args.present?\n              result\n            end\n\n            # Create an arel version of +nullif+ function\n            def arel_nullif(*args)\n              arel_named_function('nullif', *args)\n            end\n\n            # Create an arel version of +coalesce+ function\n            def arel_coalesce(*args)\n              arel_named_function('coalesce', *args)\n            end\n\n            # Create an arel version of an empty value for the range\n            def arel_empty_value\n              arel_convert_to_type('::Arel.sql(\\'NULL\\')')\n            end\n\n            # Convert timestamp range to date range format\n            def arel_daterange(real = false)\n              arel_named_function(\n                'daterange',\n                (real ? arel_real_start_at : arel_start_at) + '.pg_cast(:date)',\n                (real ? 
arel_real_finish_at : arel_finish_at) + '.pg_cast(:date)',\n                '::Arel.sql(\"\\'[]\\'\")',\n              )\n            end\n\n            def arel_check_condition(type)\n              checker = arel_nullif(arel_real_attribute, arel_empty_value)\n              checker << \".#{type}(value.pg_cast(#{type_caster.inspect}))\"\n              arel_coalesce(checker, arel_default_sql)\n            end\n\n            def arel_formatting_value(condition = nil, value = 'value', cast: nil)\n              [\n                \"#{value} = arel_table[#{value}] if #{value}.is_a?(Symbol)\",\n                \"unless #{value}.respond_to?(:pg_cast)\",\n                \"  #{value} = #{FN}.bind_with(#{arel_attribute}, #{value})\",\n                (\"  #{value} = #{value}.pg_cast(#{cast.inspect})\" if cast),\n                'end',\n                condition,\n              ].compact.join(\"\\n\")\n            end\n\n            def arel_formatting_left_right(condition, set_type = nil, cast: nil)\n              [\n                arel_formatting_value(nil, 'left', cast: cast),\n                '',\n                'if right.present?',\n                '  ' + arel_formatting_value(nil, 'right', cast: cast),\n                \"  value = #{arel_convert_to_type('left', 'right', set_type)}\",\n                'else',\n                '  value = left',\n                'end',\n                '',\n                condition,\n              ].join(\"\\n\")\n            end\n\n            ## METHOD BUILDERS\n            def klass_current_on\n              arel_formatting_value(\"where(#{arel_check_condition(:contains)})\")\n            end\n\n            def klass_current\n              [\n                \"value = #{arel_sql_bind(current_getter)}\",\n                \"where(#{arel_check_condition(:contains)})\",\n              ].join(\"\\n\")\n            end\n\n            def klass_not_current\n              [\n                \"value = #{arel_sql_bind(current_getter)}\",\n     
           \"where.not(#{arel_check_condition(:contains)})\",\n              ].join(\"\\n\")\n            end\n\n            def klass_containing\n              arel_formatting_value(\"where(#{arel_attribute}.contains(value))\")\n            end\n\n            def klass_not_containing\n              arel_formatting_value(\"where.not(#{arel_attribute}.contains(value))\")\n            end\n\n            def klass_overlapping\n              arel_formatting_left_right(\"where(#{arel_attribute}.overlaps(value))\")\n            end\n\n            def klass_not_overlapping\n              arel_formatting_left_right(\"where.not(#{arel_attribute}.overlaps(value))\")\n            end\n\n            def klass_starting_after\n              arel_formatting_value(\"where((#{arel_start_at}).gt(value))\")\n            end\n\n            def klass_starting_before\n              arel_formatting_value(\"where((#{arel_start_at}).lt(value))\")\n            end\n\n            def klass_finishing_after\n              arel_formatting_value(\"where((#{arel_finish_at}).gt(value))\")\n            end\n\n            def klass_finishing_before\n              arel_formatting_value(\"where((#{arel_finish_at}).lt(value))\")\n            end\n\n            def klass_real_containing\n              arel_formatting_value(\"where(#{arel_real_attribute}.contains(value))\")\n            end\n\n            def klass_real_overlapping\n              arel_formatting_left_right(\"where(#{arel_real_attribute}.overlaps(value))\")\n            end\n\n            def klass_real_starting_after\n              arel_formatting_value(\"where(#{arel_real_start_at}.gt(value))\")\n            end\n\n            def klass_real_starting_before\n              arel_formatting_value(\"where(#{arel_real_start_at}.lt(value))\")\n            end\n\n            def klass_real_finishing_after\n              arel_formatting_value(\"where(#{arel_real_finish_at}.gt(value))\")\n            end\n\n            def 
klass_real_finishing_before\n              arel_formatting_value(\"where(#{arel_real_finish_at}.lt(value))\")\n            end\n\n            def klass_containing_date\n              arel_formatting_value(\"where(#{arel_daterange}.contains(value))\",\n                cast: :date)\n            end\n\n            def klass_not_containing_date\n              arel_formatting_value(\"where.not(#{arel_daterange}.contains(value))\",\n                cast: :date)\n            end\n\n            def klass_overlapping_date\n              arel_formatting_left_right(\"where(#{arel_daterange}.overlaps(value))\",\n                :daterange, cast: :date)\n            end\n\n            def klass_not_overlapping_date\n              arel_formatting_left_right(\"where.not(#{arel_daterange}.overlaps(value))\",\n                :daterange, cast: :date)\n            end\n\n            def klass_real_containing_date\n              arel_formatting_value(\"where(#{arel_daterange(true)}.contains(value))\",\n                cast: :date)\n            end\n\n            def klass_real_overlapping_date\n              arel_formatting_left_right(\"where(#{arel_daterange(true)}.overlaps(value))\",\n                :daterange, cast: :date)\n            end\n\n            def instance_current?\n              \"#{method_names[:current_on?]}(#{current_getter})\"\n            end\n\n            def instance_current_on?\n              attr_value = threshold.present? ? method_names[:real] : attribute\n              default_value = default.inspect\n\n              [\n                \"return #{default_value} if #{attr_value}.nil?\",\n                \"(#{attr_value}.min.try(:infinite?) || #{attr_value}.min <= value) &&\",\n                \"  (#{attr_value}.max.try(:infinite?) 
|| #{attr_value}.max > value)\",\n              ].join(\"\\n\")\n            end\n\n            def instance_start\n              \"#{attribute}&.min\"\n            end\n\n            def instance_finish\n              \"#{attribute}&.max\"\n            end\n\n            def instance_real\n              left = method_names[:real_start]\n              right = method_names[:real_finish]\n\n              [\n                \"left = #{left}\",\n                \"right = #{right}\",\n                'return unless left || right',\n                '((left || -::Float::INFINITY)..(right || ::Float::INFINITY))',\n              ].join(\"\\n\")\n            end\n\n            def instance_real_start\n              suffix = type.eql?(:daterange) ? '.to_date' : ''\n              threshold_value = threshold.is_a?(Symbol) \\\n                ? threshold.to_s \\\n                : threshold.to_i.to_s + '.seconds'\n\n              [\n                \"return if #{method_names[:start]}.nil?\",\n                \"value = #{method_names[:start]}\",\n                \"value -= (#{threshold_value} || 0)\",\n                \"value#{suffix}\"\n              ].join(\"\\n\")\n            end\n\n            def instance_real_finish\n              suffix = type.eql?(:daterange) ? '.to_date' : ''\n              threshold_value = threshold.is_a?(Symbol) \\\n                ? threshold.to_s \\\n                : threshold.to_i.to_s + '.seconds'\n\n              [\n                \"return if #{method_names[:finish]}.nil?\",\n                \"value = #{method_names[:finish]}\",\n                \"value += (#{threshold_value} || 0)\",\n                \"value#{suffix}\"\n              ].join(\"\\n\")\n            end\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/attributes/builder.rb",
    "content": "# frozen_string_literal: true\n\nrequire_relative 'builder/enum'\nrequire_relative 'builder/period'\nrequire_relative 'builder/full_text_search'\n\nmodule Torque\n  module PostgreSQL\n    module Attributes\n      module Builder\n        def self.include_on(klass, method_name, builder_klass, **extra, &block)\n          klass.define_singleton_method(method_name) do |*args, **options|\n            return unless table_exists?\n\n            args.each do |attribute|\n              # Generate methods on self class\n              builder = builder_klass.new(self, attribute, extra.merge(options))\n              builder.conflicting?\n              builder.build\n\n              # Additional settings for the builder\n              instance_exec(builder, &block) if block.present?\n            rescue Interrupt\n              # Not able to build the attribute, maybe pending migrations\n            end\n          end\n        end\n\n        def self.search_vector_options(columns:, language: nil, stored: true, **options)\n          weights = to_search_weights(columns)\n          operation = to_search_vector_operation(language, weights).to_sql\n\n          options[:index] = {\n            using: PostgreSQL.config.full_text_search.default_index_type,\n          } if options[:index] == true\n\n          options.merge(type: :tsvector, as: operation, stored: stored)\n        end\n\n        def self.to_search_weights(columns)\n          if !columns.is_a?(Hash)\n            extras = columns.size > 3 ? columns.size - 3 : 0\n            weights = %w[A B C] + (['D'] * extras)\n            columns = Array.wrap(columns).zip(weights).to_h\n          end\n\n          columns.transform_keys(&:to_s)\n        end\n\n        def self.to_search_vector_operation(language, weights)\n          language ||= PostgreSQL.config.full_text_search.default_language\n          language = ::Arel.sql(language.is_a?(Symbol) ? 
language.to_s : \"'#{language}'\")\n          simple = weights.size == 1\n\n          empty_string = ::Arel.sql(\"''\")\n          operations = weights.map do |column, weight|\n            column = ::Arel.sql(column.to_s)\n            weight = ::Arel.sql(\"'#{weight}'\")\n\n            op = FN.to_tsvector(language, FN.coalesce(column, empty_string))\n            op = FN.setweight(op, weight) unless simple\n            op\n          end\n\n          FN.concat(*operations)\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/attributes/enum.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Attributes\n      class Enum < String\n        include Comparable\n\n        class EnumError < ArgumentError; end\n\n        LAZY_VALUE = 0.chr\n\n        class << self\n          include Enumerable\n\n          delegate :each, :sample, :size, :length, to: :members\n\n          # Find or create the class that will handle the value\n          def lookup(name)\n            const     = name.to_s.camelize\n            namespace = PostgreSQL.config.enum.namespace\n\n            return namespace.const_get(const) if namespace.const_defined?(const)\n            namespace.const_set(const, Class.new(Enum))\n          end\n\n          # Provide a method on the given class to setup which enums will be\n          # manually initialized\n          def include_on(klass, method_name = nil)\n            method_name ||= PostgreSQL.config.enum.base_method\n            Builder.include_on(klass, method_name, Builder::Enum) do |builder|\n              defined_enums[builder.attribute.to_s] = builder.subtype.klass\n            end\n          end\n\n          # Overpass new so blank values return only nil\n          def new(value)\n            return Lazy.new(self, LAZY_VALUE) if value.blank?\n            super\n          end\n\n          # Load the list of values in a lazy way\n          def values\n            @values ||= self == Enum ? 
nil : begin\n              connection.enum_values(type_name).freeze\n            end\n          end\n\n          # List of values as symbols\n          def keys\n            values.map(&:to_sym)\n          end\n\n          # Different from values, it returns the list of items already casted\n          def members\n            values.map(&method(:new))\n          end\n\n          # Get the list of the values translated by I18n\n          def texts\n            members.map(&:text)\n          end\n\n          # Get a list of values translated and ready for select\n          def to_options\n            texts.zip(values)\n          end\n\n          # Fetch a value from the list\n          # see https://github.com/rails/rails/blob/v5.0.0/activerecord/lib/active_record/fixtures.rb#L656\n          # see https://github.com/rails/rails/blob/v5.0.0/activerecord/lib/active_record/validations/uniqueness.rb#L101\n          def fetch(value, *)\n            new(value.to_s) if values.include?(value)\n          end\n          alias [] fetch\n\n          # Get the type name from its class name\n          def type_name\n            @type_name ||= self.name.demodulize.underscore\n          end\n\n          # Check if the value is valid\n          def valid?(value)\n            return false if self == Enum\n            return true if value.equal?(LAZY_VALUE)\n            self.values.include?(value.to_s)\n          end\n\n          # Build an active record scope for a given attribute against a value\n          def scope(attribute, value)\n            attribute.eq(value)\n          end\n\n          private\n\n            # Allows checking value existance\n            def respond_to_missing?(method_name, include_private = false)\n              valid?(method_name) || super\n            end\n\n            # Allow fast creation of values\n            def method_missing(method_name, *arguments)\n              return super if self == Enum\n              valid?(method_name) ? 
new(method_name.to_s) : super\n            end\n\n            # Get a connection based on its name\n            def connection\n              ::ActiveRecord::Base.connection\n            end\n\n        end\n\n        # Override string initializer to check for a valid value\n        def initialize(value)\n          str_value = value.is_a?(Numeric) ? self.class.values[value.to_i] : value.to_s\n          raise_invalid(value) unless self.class.valid?(str_value)\n          super(str_value)\n        end\n\n        # Allow comparison between values of the same enum\n        def <=>(other)\n          raise_comparison(other) if other.is_a?(Enum) && other.class != self.class\n\n          case other\n          when Numeric, Enum  then to_i <=> other.to_i\n          when String, Symbol then to_i <=> self.class.values.index(other.to_s)\n          else raise_comparison(other)\n          end\n        end\n\n        # Only allow value comparison with values of the same class\n        def ==(other)\n          (self <=> other) == 0\n        rescue EnumError\n          false\n        end\n        alias eql? ==\n\n        # Since it can have a lazy value, nil can be true here\n        def nil?\n          self == LAZY_VALUE\n        end\n        alias empty? nil?\n\n        # It only accepts if the other value is valid\n        def replace(value)\n          raise_invalid(value) unless self.class.valid?(value)\n          super\n        end\n\n        # Get a translated version of the value\n        def text(attr = nil, model = nil)\n          keys = i18n_keys(attr, model) << self.underscore.humanize\n          ::I18n.translate(keys.shift, default: keys)\n        end\n\n        # Change the string result for lazy value\n        def to_s\n          nil? ? '' : super\n        end\n\n        # Get the index of the value\n        def to_i\n          self.class.values.index(self)\n        end\n\n        # Change the inspection to show the enum name\n        def inspect\n          nil? ? 
'nil' : \":#{to_s}\"\n        end\n\n        private\n\n          # Get the i18n keys to check\n          def i18n_keys(attr = nil, model = nil)\n            values = { type: self.class.type_name, value: to_s }\n            list_from = :i18n_type_scopes\n\n            if attr && model\n              values[:attr] = attr\n              values[:model] = model.model_name.i18n_key\n              list_from = :i18n_scopes\n            end\n\n            PostgreSQL.config.enum.send(list_from).map do |key|\n              (key % values).to_sym\n            end\n          end\n\n          # Check for valid '?' and '!' methods\n          def respond_to_missing?(method_name, include_private = false)\n            name = method_name.to_s\n\n            return true if name.chomp!('?')\n            name.chomp!('!') && self.class.valid?(name)\n          end\n\n          # Allow '_' to be associated to '-'\n          def method_missing(method_name, *arguments)\n            name = method_name.to_s\n\n            if name.chomp!('?')\n              self == name\n            elsif name.chomp!('!')\n              replace(name) unless self == name\n            else\n              super\n            end\n          end\n\n          # Throw an exception for invalid values\n          def raise_invalid(value)\n            if value.is_a?(Numeric)\n              raise EnumError, \"#{value.inspect} is out of bounds of #{self.class.name}\"\n            else\n              raise EnumError, \"#{value.inspect} is not valid for #{self.class.name}\"\n            end\n          end\n\n          # Throw an exception for comparison between different enums\n          def raise_comparison(other)\n            raise EnumError, \"Comparison of #{self.class.name} with #{self.inspect} failed\"\n          end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/attributes/enum_set.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Attributes\n      class EnumSet < Set\n        include Comparable\n\n        class EnumSetError < Enum::EnumError; end\n\n        class << self\n          include Enumerable\n\n          delegate :each, to: :members\n          delegate :values, :keys, :members, :texts, :to_options, :valid?, :size,\n            :length, :connection_specification_name, to: :enum_source\n\n          # Find or create the class that will handle the value\n          def lookup(name, enum_klass)\n            const     = name.to_s.camelize + 'Set'\n            namespace = PostgreSQL.config.enum.namespace\n\n            return namespace.const_get(const) if namespace.const_defined?(const)\n\n            klass = Class.new(EnumSet)\n            klass.const_set('EnumSource', enum_klass)\n            namespace.const_set(const, klass)\n          end\n\n          # Provide a method on the given class to setup which enum sets will be\n          # manually initialized\n          def include_on(klass, method_name = nil)\n            method_name ||= PostgreSQL.config.enum.set_method\n            Builder.include_on(klass, method_name, Builder::Enum, set_features: true) do |builder|\n              defined_enums[builder.attribute.to_s] = builder.subtype\n            end\n          end\n\n          # The original Enum implementation, for individual values\n          def enum_source\n            const_get('EnumSource')\n          end\n\n          # Use the power to get a sample of the value\n          def sample\n            new(rand(0..((2 ** size) - 1)))\n          end\n\n          # Overpass new so blank values return only nil\n          def new(*values)\n            return Lazy.new(self, []) if values.compact.blank?\n            super\n          end\n\n          # Get the type name from its class name\n          def type_name\n            @type_name ||= enum_source.type_name + '[]'\n          end\n\n         
 # Fetch a value from the list\n          # see https://github.com/rails/rails/blob/v5.0.0/activerecord/lib/active_record/fixtures.rb#L656\n          # see https://github.com/rails/rails/blob/v5.0.0/activerecord/lib/active_record/validations/uniqueness.rb#L101\n          def fetch(value, *)\n            new(value.to_s) if values.include?(value)\n          end\n          alias [] fetch\n\n          # Get the power, 2 ** index, of each element\n          def power(*values)\n            values.flatten.map do |item|\n              item = item.to_i if item.is_a?(Enum)\n              item = values.index(item) unless item.is_a?(Numeric)\n\n              next 0 if item.nil? || item >= size\n              2 ** item\n            end.reduce(:+)\n          end\n\n          # Build an active record scope for a given attribute against a value\n          def scope(attribute, value)\n            attribute.contains(FN.bind_with(attribute, value).pg_cast(type_name))\n          end\n\n          private\n\n            # Allows checking value existence\n            def respond_to_missing?(method_name, include_private = false)\n              valid?(method_name) || super\n            end\n\n            # Allow fast creation of values\n            def method_missing(method_name, *arguments)\n              return super if self == Enum\n              valid?(method_name) ? 
new(method_name.to_s) : super\n            end\n        end\n\n        # Override string initializer to check for a valid value\n        def initialize(*values)\n          items =\n            if values.size === 1 && values.first.is_a?(Numeric)\n              transform_power(values.first)\n            else\n              transform_values(values)\n            end\n\n          @hash = items.zip(Array.new(items.size, true)).to_h\n        end\n\n        # Allow comparison between values of the same enum\n        def <=>(other)\n          raise_comparison(other) if other.is_a?(EnumSet) && other.class != self.class\n\n          to_i <=>\n            case other\n            when Numeric, EnumSet then other.to_i\n            when String, Symbol   then self.class.power(other.to_s)\n            when Array, Set       then self.class.power(*other)\n            else raise_comparison(other)\n            end\n        end\n\n        # Only allow value comparison with values of the same class\n        def ==(other)\n          (self <=> other) == 0\n        rescue EnumSetError\n          false\n        end\n        alias eql? 
==\n\n        # It only accepts if the other value is valid\n        def replace(*values)\n          super(transform_values(values))\n        end\n\n        # Get a translated version of the value\n        def text(attr = nil, model = nil)\n          map { |item| item.text(attr, model) }.to_sentence\n        end\n        alias to_s text\n\n        # Get the index of the value\n        def to_i\n          self.class.power(@hash.keys)\n        end\n\n        # Change the inspection to show the enum name\n        def inspect\n          \"[#{map(&:inspect).join(', ')}]\"\n        end\n\n        # Replace the setter by instantiating the value\n        def []=(key, value)\n          super(key, instantiate(value))\n        end\n\n        # Override the merge method to ensure formatted values\n        def merge(other)\n          super other.map(&method(:instantiate))\n        end\n\n        # Override bitwise & operator to ensure formatted values\n        def &(other)\n          other = other.entries.map(&method(:instantiate))\n          values = @hash.keys.select { |k| other.include?(k) }\n          self.class.new(values)\n        end\n\n        # Operations that requries the other values to be transformed as well\n        %i[add delete include? subtract].each do |method_name|\n          define_method(method_name) do |other|\n            other =\n              if other.is_a?(self.class)\n                other\n              elsif other.is_a?(::Enumerable)\n                other.map(&method(:instantiate))\n              else\n                instantiate(other)\n              end\n\n            super(other)\n          end\n        end\n\n        private\n\n          # Create a new enum instance of the value\n          def instantiate(value)\n            value.is_a?(self.class.enum_source) ? 
value : self.class.enum_source.new(value)\n          end\n\n          # Turn a binary (power) definition into real values\n          def transform_power(value)\n            list = value.to_s(2).reverse.chars.map.with_index do |item, idx|\n              next idx if item.eql?('1')\n            end\n\n            raise raise_invalid(value) if list.size > self.class.size\n            self.class.members.values_at(*list.compact)\n          end\n\n          # Turn all the values into their respective Enum representations\n          def transform_values(values)\n            values = values.first if values.size.eql?(1) && values.first.is_a?(::Enumerable)\n            values.map(&method(:instantiate)).reject(&:nil?)\n          end\n\n          # Check for valid '?' and '!' methods\n          def respond_to_missing?(method_name, include_private = false)\n            name = method_name.to_s\n\n            return true if name.chomp!('?')\n            name.chomp!('!') && self.class.valid?(name)\n          end\n\n          # Allow '_' to be associated to '-'\n          def method_missing(method_name, *arguments)\n            name = method_name.to_s\n\n            if name.chomp!('?')\n              include?(name)\n            elsif name.chomp!('!')\n              add(name) unless include?(name)\n            else\n              super\n            end\n          end\n\n          # Throw an exception for invalid values\n          def raise_invalid(value)\n            if value.is_a?(Numeric)\n              raise EnumSetError, \"#{value.inspect} is out of bounds of #{self.class.name}\"\n            else\n              raise EnumSetError, \"#{value.inspect} is not valid for #{self.class.name}\"\n            end\n          end\n\n          # Throw an exception for comparison between different enums\n          def raise_comparison(other)\n            raise EnumSetError, \"Comparison of #{self.class.name} with #{self.inspect} failed\"\n          end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/attributes/full_text_search.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Attributes\n      # For now, full text search doesn't have its own class\n      module FullTextSearch\n        class << self\n          # Provide a method on the given class to setup which full text search\n          # columns will be manually initialized\n          def include_on(klass, method_name = nil)\n            method_name ||= PostgreSQL.config.full_text_search.base_method\n            Builder.include_on(klass, method_name, Builder::FullTextSearch)\n          end\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/attributes/lazy.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Attributes\n      class Lazy < BasicObject\n\n        def initialize(klass, *values)\n          @klass, @values = klass, values\n        end\n\n        def ==(other)\n          other.nil?\n        end\n\n        def nil?\n          true\n        end\n\n        def inspect\n          'nil'\n        end\n\n        def __class__\n          Lazy\n        end\n\n        def method_missing(name, *args, &block)\n          @klass.new(*@values).send(name, *args, &block)\n        end\n\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/attributes/period.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Attributes\n      # For now, period doesn't have its own class\n      module Period\n        class << self\n          # Provide a method on the given class to setup which period columns\n          # will be manually initialized\n          def include_on(klass, method_name = nil)\n            method_name ||= PostgreSQL.config.period.base_method\n            Builder.include_on(klass, method_name, Builder::Period)\n          end\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/attributes.rb",
    "content": "require_relative 'attributes/lazy'\nrequire_relative 'attributes/builder'\n"
  },
  {
    "path": "lib/torque/postgresql/autosave_association.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module AutosaveAssociation\n      module ClassMethods\n        # Since belongs to many is a collection, the callback would normally go\n        # to +after_create+. However, since it is a +belongs_to+ kind of\n        # association, it needs to be executed +before_save+\n        def add_autosave_association_callbacks(reflection)\n          return super unless reflection.macro.eql?(:belongs_to_many)\n\n          save_method = :\"autosave_associated_records_for_#{reflection.name}\"\n          define_non_cyclic_method(save_method) do\n            save_belongs_to_many_association(reflection)\n          end\n\n          before_save(save_method)\n\n          define_autosave_validation_callbacks(reflection)\n        end\n      end\n\n      # Ensure the right way to execute +save_collection_association+ and also\n      # keep it as a single change using +build_changes+\n      def save_belongs_to_many_association(reflection)\n        previously_new_record_before_save = (@new_record_before_save ||= false)\n        @new_record_before_save = new_record?\n\n        association = association_instance_get(reflection.name)\n        association&.build_changes { save_collection_association(reflection) }\n      rescue ::ActiveRecord::RecordInvalid\n        throw(:abort)\n      ensure\n        @new_record_before_save = previously_new_record_before_save\n      end\n    end\n\n    ::ActiveRecord::Base.singleton_class.prepend(AutosaveAssociation::ClassMethods)\n    ::ActiveRecord::Base.include(AutosaveAssociation)\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/auxiliary_statement/recursive.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    class AuxiliaryStatement\n      class Recursive < AuxiliaryStatement\n        # Setup any additional option in the recursive mode\n        def initialize(*, **options)\n          super\n\n          @connect = options[:connect]&.to_a&.first\n          @union_all = options[:union_all]\n          @sub_query = options[:sub_query]\n\n          if options.key?(:with_depth)\n            @depth = options[:with_depth].values_at(:name, :start, :as)\n            @depth[0] ||= 'depth'\n          end\n\n          if options.key?(:with_path)\n            @path = options[:with_path].values_at(:name, :source, :as)\n            @path[0] ||= 'path'\n          end\n        end\n\n        private\n\n          # Build the string or arel query\n          def build_query(base)\n            # Expose columns and get the list of the ones for select\n            columns = expose_columns(base, @query.try(:arel_table))\n            sub_columns = columns.dup\n            type = @union_all.present? ? 
'all' : ''\n\n            # Build any extra columns that are dynamic and from the recursion\n            extra_columns(base, columns, sub_columns)\n\n            # Prepare the query depending on its type\n            if @query.is_a?(String) && @sub_query.is_a?(String)\n              args = @args.each_with_object({}) { |h, (k, v)| h[k] = base.connection.quote(v) }\n              ::Arel.sql(\"(#{@query} UNION #{type.upcase} #{@sub_query})\" % args)\n            elsif relation_query?(@query)\n              @query = @query.where(@where) if @where.present?\n              @bound_attributes.concat(@query.send(:bound_attributes))\n\n              if relation_query?(@sub_query)\n                @bound_attributes.concat(@sub_query.send(:bound_attributes))\n\n                sub_query = @sub_query.select(*sub_columns).arel\n                sub_query.from([@sub_query.arel_table, table])\n              else\n                sub_query = ::Arel.sql(@sub_query)\n              end\n\n              @query.select(*columns).arel.union(type, sub_query)\n            else\n              raise ArgumentError, <<-MSG.squish\n                Only String and ActiveRecord::Base objects are accepted as query and sub query\n                objects, #{@query.class.name} given for #{self.class.name}.\n              MSG\n            end\n          end\n\n          # Setup the statement using the class configuration\n          def prepare(base, settings)\n            super\n\n            prepare_sub_query(base, settings)\n          end\n\n          # Make sure that both parts of the union are ready\n          def prepare_sub_query(base, settings)\n            @union_all = settings.union_all if @union_all.nil?\n            @sub_query ||= settings.sub_query\n            @depth ||= settings.depth\n            @path ||= settings.path\n\n            # Collect the connection\n            @connect ||= settings.connect || begin\n              key = base.primary_key\n              [key.to_sym, 
:\"parent_#{key}\"] unless key.nil?\n            end\n\n            raise ArgumentError, <<-MSG.squish if @sub_query.nil? && @query.is_a?(String)\n              Unable to generate sub query from a string query. Please provide a `sub_query`\n              property on the \"#{table_name}\" settings.\n            MSG\n\n            if @sub_query.nil?\n              raise ArgumentError, <<-MSG.squish if @connect.blank?\n                Unable to generate sub query without setting up a proper way to connect it\n                with the main query. Please provide a `connect` property on the \"#{table_name}\"\n                settings.\n              MSG\n\n              left, right = @connect.map(&:to_s)\n              condition = @query.arel_table[right].eq(table[left])\n\n              if @query.where_values_hash.key?(right)\n                @sub_query = @query.unscope(where: right.to_sym).where(condition)\n              else\n                @sub_query = @query.where(condition)\n                @query = @query.where(right => nil)\n              end\n            elsif @sub_query.respond_to?(:call)\n              # Call a proc to get the real sub query\n              call_args = @sub_query.try(:arity) === 0 ? 
[] : [OpenStruct.new(@args)]\n              @sub_query = @sub_query.call(*call_args)\n            end\n          end\n\n          # Add depth and path if they were defined in settings\n          def extra_columns(base, columns, sub_columns)\n            return if @query.is_a?(String) || @sub_query.is_a?(String)\n\n            # Add the connect attribute to the query\n            if defined?(@connect)\n              columns.unshift(@query.arel_table[@connect[0]])\n              sub_columns.unshift(@sub_query.arel_table[@connect[0]])\n            end\n\n            # Build a column to represent the depth of the recursion\n            if @depth.present?\n              name, start, as = @depth\n              col = table[name]\n              base.select_extra_values += [col.as(as)] unless as.nil?\n\n              columns << ::Arel.sql(start.to_s).as(name)\n              sub_columns << (col + ::Arel.sql('1')).as(name)\n            end\n\n            # Build a column to represent the path of the record access\n            if @path.present?\n              name, source, as = @path\n              source = @query.arel_table[source || @connect[0]]\n\n              col = table[name]\n              base.select_extra_values += [col.as(as)] unless as.nil?\n              parts = [col, source.pg_cast(:varchar)]\n\n              columns << ::Arel.array([source]).pg_cast(:varchar, true).as(name)\n              sub_columns << FN.array_append(*parts).as(name)\n            end\n          end\n\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/auxiliary_statement/settings.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    class AuxiliaryStatement\n      class Settings < Collector.new(:attributes, :join, :join_type, :query, :requires,\n          :polymorphic, :through, :union_all, :connect)\n\n        attr_reader :base, :source, :depth, :path\n        alias_method :select, :attributes\n        alias_method :cte, :source\n\n        delegate :relation_query?, to: Torque::PostgreSQL::AuxiliaryStatement\n        delegate :table, :table_name, to: :@source\n        delegate :sql, to: ::Arel\n\n        def initialize(base, source, recursive = false)\n          @base = base\n          @source = source\n          @recursive = recursive\n        end\n\n        def base_name\n          @base.name\n        end\n\n        def base_table\n          @base.arel_table\n        end\n\n        def recursive?\n          @recursive\n        end\n\n        def depth?\n          defined?(@depth)\n        end\n\n        def path?\n          defined?(@path)\n        end\n\n        # Add an attribute to the result showing the depth of each iteration\n        def with_depth(name = 'depth', start: 0, as: nil)\n          @depth = [name.to_s, start, as&.to_s] if recursive?\n        end\n\n        # Add an attribute to the result showing the path of each record\n        def with_path(name = 'path', source: nil, as: nil)\n          @path = [name.to_s, source&.to_s, as&.to_s] if recursive?\n        end\n\n        # Set recursive operation to use union all\n        def union_all!\n          @union_all = true if recursive?\n        end\n\n        # Add both depth and path to the result\n        def with_depth_and_path\n          with_depth && with_path\n        end\n\n        # Get the arel version of the table set on the query\n        def query_table\n          raise StandardError, 'The query is not defined yet' if query.nil?\n          return query.arel_table if relation_query?(query)\n          @query_table\n        end\n\n    
    # Grant an easy access to arel table columns\n        def col(name)\n          query_table[name.to_s]\n        end\n\n        alias column col\n\n        # There are three ways of setting the query:\n        # - A simple relation based on a Model\n        # - A Arel-based select manager\n        # - A string or a proc\n        def query(value = nil, command = nil)\n          return @query if value.nil?\n\n          @query = sanitize_query(value, command)\n        end\n\n        # Same as query, but for the second part of the union for recursive cte\n        def sub_query(value = nil, command = nil)\n          return unless recursive?\n          return @sub_query if value.nil?\n\n          @sub_query = sanitize_query(value, command)\n        end\n\n        # Assume `parent_` as the other part if provided a Symbol or String\n        def connect(value = nil)\n          return @connect if value.nil?\n\n          value = [value.to_sym, :\"parent_#{value}\"] \\\n            if value.is_a?(String) || value.is_a?(Symbol)\n          value = value.to_a.first if value.is_a?(Hash)\n\n          @connect = value\n        end\n\n        alias connect= connect\n\n        private\n\n          # Get the query and table from the params\n          def sanitize_query(value, command = nil)\n            return value if relation_query?(value)\n            return value if value.is_a?(::Arel::SelectManager)\n\n            command = value if command.nil? # For compatibility purposes\n            valid_type = command.respond_to?(:call) || command.is_a?(String)\n\n            raise ArgumentError, <<-MSG.squish unless valid_type\n              Only relation, string and proc are valid object types for query,\n              #{command.inspect} given.\n            MSG\n\n            command\n          end\n\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/auxiliary_statement.rb",
    "content": "# frozen_string_literal: true\n\nrequire_relative 'auxiliary_statement/settings'\nrequire_relative 'auxiliary_statement/recursive'\n\nmodule Torque\n  module PostgreSQL\n    class AuxiliaryStatement\n      TABLE_COLUMN_AS_STRING = /\\A(?:\"?(\\w+)\"?\\.)?\"?(\\w+)\"?\\z/.freeze\n\n      class << self\n        attr_reader :config, :table_name\n\n        # Find or create the class that will handle statement\n        def lookup(name, base)\n          const = name.to_s.camelize << '_' << self.name.demodulize\n          return base.const_get(const, false) if base.const_defined?(const, false)\n\n          base.const_set(const, Class.new(self)).tap do |klass|\n            klass.instance_variable_set(:@table_name, name.to_s)\n          end\n        end\n\n        # Create a new instance of an auxiliary statement\n        def instantiate(statement, base, **options)\n          klass = while base < ActiveRecord::Base\n            list = base.auxiliary_statements_list\n            break list[statement] if list.present? && list.key?(statement)\n\n            base = base.superclass\n          end\n\n          return klass.new(**options) unless klass.nil?\n          raise ArgumentError, <<-MSG.squish\n            There's no '#{statement}' auxiliary statement defined for #{base.class.name}.\n          MSG\n        end\n\n        # Fast access to statement build\n        def build(statement, base, bound_attributes = [], join_sources = [], **options)\n          klass = instantiate(statement, base, **options)\n          result = klass.build(base)\n\n          bound_attributes.concat(klass.bound_attributes)\n          join_sources.concat(klass.join_sources)\n          result\n        end\n\n        # Identify if the query set may be used as a relation\n        def relation_query?(obj)\n          !obj.nil? 
&& obj.respond_to?(:ancestors) && \\\n            obj.ancestors.include?(ActiveRecord::Base)\n        end\n\n        # Identify if the query set may be used as arel\n        def arel_query?(obj)\n          !obj.nil? && obj.is_a?(::Arel::SelectManager)\n        end\n\n        # A way to create auxiliary statements outside of models configurations,\n        # being able to use on extensions\n        def create(table_or_settings, &block)\n          klass = Class.new(self)\n\n          if block_given?\n            klass.instance_variable_set(:@table_name, table_or_settings)\n            klass.configurator(block)\n          elsif relation_query?(table_or_settings)\n            klass.configurator(query: table_or_settings)\n          else\n            klass.configurator(table_or_settings)\n          end\n\n          klass\n        end\n\n        # Set a configuration block or static hash\n        def configurator(config)\n          if config.is_a?(Hash)\n            # Map the aliases\n            config[:attributes] = config.delete(:select) if config.key?(:select)\n\n            # Create the struct that mocks a configuration result\n            config = OpenStruct.new(config)\n            table_name = config[:query]&.klass&.name&.underscore\n            instance_variable_set(:@table_name, table_name)\n          end\n\n          @config = config\n        end\n\n        # Run a configuration block or get the static configuration\n        def configure(base, instance)\n          return @config unless @config.respond_to?(:call)\n\n          recursive = self < AuxiliaryStatement::Recursive\n          settings = Settings.new(base, instance, recursive)\n          settings.instance_exec(settings, &@config)\n          settings\n        end\n\n        # Get the arel version of the statement table\n        def table\n          @table ||= ::Arel::Table.new(table_name)\n        end\n      end\n\n      delegate :config, :table, :table_name, :relation, :configure, :relation_query?,\n    
    to: :class\n\n      attr_reader :bound_attributes, :join_sources\n\n      # Start a new auxiliary statement giving extra options\n      def initialize(*, **options)\n        args_key = Torque::PostgreSQL.config.auxiliary_statement.send_arguments_key\n\n        @join = options.fetch(:join, {})\n        @args = options.fetch(args_key, {})\n        @where = options.fetch(:where, {})\n        @select = options.fetch(:select, {})\n        @join_type = options[:join_type]\n\n        @bound_attributes = []\n        @join_sources = []\n      end\n\n      # Build the statement on the given arel and return the WITH statement\n      def build(base)\n        @bound_attributes.clear\n        @join_sources.clear\n\n        # Prepare all the data for the statement\n        prepare(base, configure(base, self))\n\n        # Add the join condition to the list\n        @join_sources << build_join(base)\n\n        # Return the statement with its dependencies\n        [@dependencies, ::Arel::Nodes::As.new(table, build_query(base))]\n      end\n\n      private\n\n        # Setup the statement using the class configuration\n        def prepare(base, settings)\n          requires = Array.wrap(settings.requires).flatten.compact\n          @dependencies = ensure_dependencies(requires, base).flatten.compact\n\n          @join_type ||= settings.join_type || :inner\n          @query = settings.query\n\n          # Call a proc to get the real query\n          if @query.respond_to?(:call)\n            call_args = @query.try(:arity) === 0 ? 
[] : [OpenStruct.new(@args)]\n            @query = @query.call(*call_args)\n          end\n\n          # Merge select attributes provided on the instance creation\n          @select = settings.attributes.merge(@select) if settings.attributes.present?\n\n          # Merge join settings\n          if settings.join.present?\n            @join = settings.join.merge(@join)\n          elsif settings.through.present?\n            @association = settings.through.to_s\n          elsif relation_query?(@query)\n            @association = base.reflections.find do |name, reflection|\n              break name if @query.klass.eql?(reflection.klass)\n            end\n          end\n        end\n\n        # Build the string or arel query\n        def build_query(base)\n          # Expose columns and get the list of the ones for select\n          columns = expose_columns(base, @query.try(:arel_table))\n\n          # Prepare the query depending on its type\n          if @query.is_a?(String)\n            args = @args.map{ |k, v| [k, base.connection.quote(v)] }.to_h\n            ::Arel.sql(\"(#{@query})\" % args)\n          elsif relation_query?(@query)\n            @query = @query.where(@where) if @where.present?\n            @bound_attributes.concat(@query.send(:bound_attributes))\n            @query.select(*columns).arel\n          else\n            raise ArgumentError, <<-MSG.squish\n              Only String and ActiveRecord::Base objects are accepted as query objects,\n              #{@query.class.name} given for #{self.class.name}.\n            MSG\n          end\n        end\n\n        # Build the join statement that will be sent to the main arel\n        def build_join(base)\n          conditions = table.create_and([])\n          builder = base.predicate_builder\n          foreign_table = base.arel_table\n\n          # Check if it's necessary to load the join from an association\n          if @association.present?\n            association = base.reflections[@association]\n\n   
         # Require source of a through reflection\n            if association.through_reflection?\n              base.joins(association.source_reflection_name)\n\n              # Changes the base of the connection to the reflection table\n              builder = association.klass.predicate_builder\n              foreign_table = ::Arel::Table.new(association.plural_name)\n            end\n\n            @query.merge(association.join_scope(@query.arel_table, foreign_table, base))\n\n            # Add the join constraints\n            constraint = association.build_join_constraint(table, foreign_table)\n            constraint = constraint.children if constraint.is_a?(::Arel::Nodes::And)\n            conditions.children.concat(Array.wrap(constraint))\n          end\n\n          # Build all conditions for the join on statement\n          @join.inject(conditions.children) do |arr, (left, right)|\n            left = project(left, foreign_table)\n            item = right.is_a?(Symbol) ? project(right).eq(left) : builder.build(left, right)\n            arr.push(item)\n          end\n\n          # Raise an error when there's no join conditions\n          raise ArgumentError, <<-MSG.squish if conditions.children.empty?\n            You must provide the join columns when using '#{@query.class.name}'\n            as a query object on #{self.class.name}.\n          MSG\n\n          # Build the join based on the join type\n          arel_join.new(table, table.create_on(conditions))\n        end\n\n        # Get the class of the join on arel\n        def arel_join\n          case @join_type\n          when :inner then ::Arel::Nodes::InnerJoin\n          when :left  then ::Arel::Nodes::OuterJoin\n          when :right then ::Arel::Nodes::RightOuterJoin\n          when :full  then ::Arel::Nodes::FullOuterJoin\n          else\n            raise ArgumentError, <<-MSG.squish\n              The '#{@join_type}' is not implemented as a join type.\n            MSG\n          end\n        
end\n\n        # Mount the list of selected attributes\n        def expose_columns(base, query_table = nil)\n          # Add the columns necessary for the join\n          list = @join_sources.each_with_object(@select) do |join, hash|\n            join.right.expr.children.each do |item|\n              hash[item.left.name] = nil if item.left.relation.eql?(table)\n            end\n          end\n\n          # Add select columns to the query and get exposed columns\n          list.filter_map do |left, right|\n            base.select_extra_values += [table[right.to_s]] unless right.nil?\n            next unless query_table\n\n            col = project(left, query_table)\n            right.nil? ? col : col.as(right.to_s)\n          end\n        end\n\n        # Ensure that all the dependencies are loaded in the base relation\n        def ensure_dependencies(list, base)\n          with_options = list.extract_options!.to_a\n          (list + with_options).map do |name, options|\n            dependent_klass = base.model.auxiliary_statements_list[name]\n\n            raise ArgumentError, <<-MSG.squish if dependent_klass.nil?\n              The '#{name}' auxiliary statement dependency can't be found on\n              #{self.class.name}.\n            MSG\n\n            next if base.auxiliary_statements_values.any? 
do |cte|\n              cte.is_a?(dependent_klass)\n            end\n\n            options ||= {}\n            AuxiliaryStatement.build(name, base, bound_attributes, join_sources, **options)\n          end\n        end\n\n        # Project a column on a given table, or use the column table\n        def project(column, arel_table = nil)\n          if column.respond_to?(:as)\n            return column\n          elsif (as_string = TABLE_COLUMN_AS_STRING.match(column.to_s))\n            column = as_string[2]\n            arel_table = ::Arel::Table.new(as_string[1]) unless as_string[1].nil?\n          end\n\n          arel_table ||= table\n          arel_table[column.to_s]\n        end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/base.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Base\n      extend ActiveSupport::Concern\n\n      ##\n      # :singleton-method: schema\n      # :call-seq: schema\n      #\n      # The schema to which the table belongs to.\n\n      included do\n        mattr_accessor :belongs_to_many_required_by_default, instance_accessor: false\n        class_attribute :schema, instance_writer: false\n      end\n\n      class_methods do\n        delegate :distinct_on, :with, :itself_only, :cast_records, :join_series,\n          :buckets, to: :all\n\n        # Make sure that table name is an instance of TableName class\n        def reset_table_name\n          return super unless PostgreSQL.config.schemas.enabled\n          self.table_name = TableName.new(self, super)\n        end\n\n        # Whenever the base model is inherited, add a list of auxiliary\n        # statements like the one that loads inherited records' relname\n        def inherited(subclass)\n          super\n\n          subclass.class_attribute(:auxiliary_statements_list)\n          subclass.auxiliary_statements_list = {}\n\n          record_class = ActiveRecord::Relation._record_class_attribute\n\n          # Define the dynamic attribute that returns the same information as\n          # the one provided by the auxiliary statement\n          subclass.dynamic_attribute(record_class) do\n            klass = self.class\n            next klass.table_name unless klass.physically_inheritances?\n\n            query = klass.unscoped.where(subclass.primary_key => id)\n            query.pluck(klass.arel_table['tableoid'].pg_cast('regclass')).first\n          end\n        end\n\n        # Specifies a one-to-many association. 
The following methods for\n        # retrieval and query of collections of associated objects will be\n        # added:\n        #\n        # +collection+ is a placeholder for the symbol passed as the +name+\n        # argument, so <tt>belongs_to_many :tags</tt> would add among others\n        # <tt>tags.empty?</tt>.\n        #\n        # [collection]\n        #   Returns a Relation of all the associated objects.\n        #   An empty Relation is returned if none are found.\n        # [collection<<(object, ...)]\n        #   Adds one or more objects to the collection by adding their ids to\n        #   the array of ids on the parent object.\n        #   Note that this operation instantly fires update SQL without waiting\n        #   for the save or update call on the parent object, unless the parent\n        #   object is a new record.\n        #   This will also run validations and callbacks of associated\n        #   object(s).\n        # [collection.delete(object, ...)]\n        #   Removes one or more objects from the collection by removing their\n        #   ids from the list on the parent object.\n        #   Objects will be in addition destroyed if they're associated with\n        #   <tt>dependent: :destroy</tt>, and deleted if they're associated\n        #   with <tt>dependent: :delete_all</tt>.\n        # [collection.destroy(object, ...)]\n        #   Removes one or more objects from the collection by running\n        #   <tt>destroy</tt> on each record, regardless of any dependent option,\n        #   ensuring callbacks are run. 
They will also be removed from the list\n        #   on the parent object.\n        # [collection=objects]\n        #   Replaces the collections content by deleting and adding objects as\n        #   appropriate.\n        # [collection_singular_ids]\n        #   Returns an array of the associated objects' ids\n        # [collection_singular_ids=ids]\n        #   Replace the collection with the objects identified by the primary\n        #   keys in +ids+. This method loads the models and calls\n        #   <tt>collection=</tt>. See above.\n        # [collection.clear]\n        #   Removes every object from the collection. This destroys the\n        #   associated objects if they are associated with\n        #   <tt>dependent: :destroy</tt>, deletes them directly from the\n        #   database if <tt>dependent: :delete_all</tt>, otherwise just remove\n        #   them from the list on the parent object.\n        # [collection.empty?]\n        #   Returns +true+ if there are no associated objects.\n        # [collection.size]\n        #   Returns the number of associated objects.\n        # [collection.find(...)]\n        #   Finds an associated object according to the same rules as\n        #   ActiveRecord::FinderMethods#find.\n        # [collection.exists?(...)]\n        #   Checks whether an associated object with the given conditions exists.\n        #   Uses the same rules as ActiveRecord::FinderMethods#exists?.\n        # [collection.build(attributes = {}, ...)]\n        #   Returns one or more new objects of the collection type that have\n        #   been instantiated with +attributes+ and linked to this object by\n        #   adding its +id+ to the list after saving.\n        # [collection.create(attributes = {})]\n        #   Returns a new object of the collection type that has been\n        #   instantiated with +attributes+, linked to this object by adding its\n        #   +id+ to the list after performing the save (if it passed the\n        #   
validation).\n        # [collection.create!(attributes = {})]\n        #   Does the same as <tt>collection.create</tt>, but raises\n        #   ActiveRecord::RecordInvalid if the record is invalid.\n        # [collection.reload]\n        #   Returns a Relation of all of the associated objects, forcing a\n        #   database read. An empty Relation is returned if none are found.\n        #\n        # === Example\n        #\n        # A <tt>Video</tt> class declares <tt>belongs_to_many :tags</tt>,\n        # which will add:\n        # * <tt>Video#tags</tt> (similar to <tt>Tag.where([id] && tag_ids)</tt>)\n        # * <tt>Video#tags<<</tt>\n        # * <tt>Video#tags.delete</tt>\n        # * <tt>Video#tags.destroy</tt>\n        # * <tt>Video#tags=</tt>\n        # * <tt>Video#tag_ids</tt>\n        # * <tt>Video#tag_ids=</tt>\n        # * <tt>Video#tags.clear</tt>\n        # * <tt>Video#tags.empty?</tt>\n        # * <tt>Video#tags.size</tt>\n        # * <tt>Video#tags.find</tt>\n        # * <tt>Video#tags.exists?(name: 'ACME')</tt>\n        # * <tt>Video#tags.build</tt>\n        # * <tt>Video#tags.create</tt>\n        # * <tt>Video#tags.create!</tt>\n        # * <tt>Video#tags.reload</tt>\n        # The declaration can also include an +options+ hash to specialize the\n        # behavior of the association.\n        #\n        # === Options\n        # [:class_name]\n        #   Specify the class name of the association. Use it only if that name\n        #   can't be inferred from the association name. So <tt>belongs_to_many\n        #   :tags</tt> will by default be linked to the +Tag+ class, but if the\n        #   real class name is +SpecialTag+, you'll have to specify it with this\n        #   option.\n        # [:foreign_key]\n        #   Specify the foreign key used for the association. By default this is\n        #   guessed to be the name of this class in lower-case and \"_ids\"\n        #   suffixed. 
So a Video class that makes a #belongs_to_many association\n        #   with Tag will use \"tag_ids\" as the default <tt>:foreign_key</tt>.\n        #\n        #   It is a good idea to set the <tt>:inverse_of</tt> option as well.\n        # [:primary_key]\n        #   Specify the name of the column to use as the primary key for the\n        #   association. By default this is +id+.\n        # [:dependent]\n        #   Controls what happens to the associated objects when their owner is\n        #   destroyed. Note that these are implemented as callbacks, and Rails\n        #   executes callbacks in order. Therefore, other similar callbacks may\n        #   affect the <tt>:dependent</tt> behavior, and the <tt>:dependent</tt>\n        #   behavior may affect other callbacks.\n        # [:touch]\n        #   If true, the associated objects will be touched (the updated_at/on\n        #   attributes set to current time) when this record is either saved or\n        #   destroyed. If you specify a symbol, that attribute will be updated\n        #   with the current time in addition to the updated_at/on attribute.\n        #   Please note that with touching no validation is performed and only\n        #   the +after_touch+, +after_commit+ and +after_rollback+ callbacks are\n        #   executed.\n        # [:optional]\n        #   When set to +true+, the association will not have its presence\n        #   validated.\n        # [:required]\n        #   When set to +true+, the association will also have its presence\n        #   validated. This will validate the association itself, not the id.\n        #   You can use +:inverse_of+ to avoid an extra query during validation.\n        #   NOTE: <tt>required</tt> is set to <tt>false</tt> by default and is\n        #   deprecated. If you want to have association presence validated,\n        #   use <tt>required: true</tt>.\n        # [:default]\n        #   Provide a callable (i.e. 
proc or lambda) to specify that the\n        #   association should be initialized with a particular record before\n        #   validation.\n        # [:inverse_of]\n        #   Specifies the name of the #has_many association on the associated\n        #   object that is the inverse of this #belongs_to_many association.\n        #   See ActiveRecord::Associations::ClassMethods's overview on\n        #   Bi-directional associations for more detail.\n        #\n        # Option examples:\n        #   belongs_to_many :tags, dependent: :nullify\n        #   belongs_to_many :tags, required: true, touch: true\n        #   belongs_to_many :tags, default: -> { Tag.default }\n        def belongs_to_many(name, scope = nil, **options, &extension)\n          klass = Associations::Builder::BelongsToMany\n          reflection = klass.build(self, name, scope, options, &extension)\n          ::ActiveRecord::Reflection.add_reflection(self, name, reflection)\n        end\n\n        protected\n\n          # Allow optional select attributes to be loaded manually when they are\n          # not present. 
This is associated with auxiliary statement, which\n          # permits columns that can be loaded through CTEs, be loaded\n          # individually for a single record\n          #\n          # For instance, if you have a statement that can load an user's last\n          # comment content, by querying the comments using an auxiliary\n          # statement.\n          #   subclass.auxiliary_statement :last_comment do |cte|\n          #     cte.query Comment.order(:user_id, id: :desc)\n          #       .distinct_on(:user_id)\n          #     cte.attributes col(:content) => :last_comment\n          #     cte.join_type :left\n          #   end\n          #\n          # In case you don't use 'with(:last_comment)', you can do the\n          # following.\n          #   dynamic_attribute(:last_comment) do\n          #     comments.order(id: :desc).first.content\n          #   end\n          #\n          # This means that any auxiliary statements can have their columns\n          # granted even when they are not used\n          def dynamic_attribute(name, &block)\n            define_method(name) do\n              return read_attribute(name) if has_attribute?(name)\n              result = self.instance_exec(&block)\n\n              type_klass = ActiveRecord::Type.respond_to?(:default_value) \\\n                ? 
ActiveRecord::Type.default_value \\\n                : self.class.connection.type_map.send(:default_value)\n\n              @attributes[name.to_s] = ActiveRecord::Relation::QueryAttribute.new(\n                name.to_s, result, type_klass,\n              )\n\n              read_attribute(name)\n            end\n          end\n\n          # Creates a new auxiliary statement (CTE) under the base class\n          # attributes key:\n          # Provides a map of attributes to be exposed to the main query.\n          #\n          # For instance, if the statement query has an 'id' column that you\n          # want it to be accessed on the main query as 'item_id',\n          # you can use:\n          #   attributes id: :item_id, 'MAX(id)' => :max_id,\n          #     col(:id).minimum => :min_id\n          #\n          # If its statement has more tables, and you want to expose those\n          # fields, then:\n          #   attributes 'table.name': :item_name\n          #\n          # join_type key:\n          # Changes the type of the join and set the constraints\n          #\n          # The left side of the hash is the source table column, the right\n          # side is the statement table column, now it's only accepting '='\n          # constraints\n          #   join id: :user_id\n          #   join id: :'user.id'\n          #   join 'post.id': :'user.last_post_id'\n          #\n          # It's possible to change the default type of join\n          #   join :left, id: :user_id\n          #\n          # join key:\n          # Changes the type of the join\n          #\n          # query key:\n          # Save the query command to be performed\n          #\n          # requires key:\n          # Indicates dependencies with other statements\n          #\n          # polymorphic key:\n          # Indicates a polymorphic relationship, which will affect the way the\n          # auto join works, by giving a polymorphic connection\n          def auxiliary_statement(table, 
&block)\n            klass = AuxiliaryStatement.lookup(table, self)\n            auxiliary_statements_list[table.to_sym] = klass\n            klass.configurator(block)\n          end\n          alias cte auxiliary_statement\n\n          # Creates a new recursive auxiliary statement (CTE) under the base\n          # Very similar to the regular auxiliary statement, but with two-part\n          # query where one is executed first and the second recursively\n          def recursive_auxiliary_statement(table, &block)\n            klass = AuxiliaryStatement::Recursive.lookup(table, self)\n            auxiliary_statements_list[table.to_sym] = klass\n            klass.configurator(block)\n          end\n          alias recursive_cte recursive_auxiliary_statement\n      end\n    end\n\n    ::ActiveRecord::Base.include(Base)\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/collector.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Collector\n\n      # This class helps to collect data in different ways. Used to configure\n      # auxiliary statements\n      def self.new(*args)\n        klass = Class.new\n\n        args.flatten!\n        args.compact!\n\n        klass.module_eval do\n          args.each do |attribute|\n            define_method attribute do |*args|\n              if args.empty?\n                instance_variable_get(\"@#{attribute}\")\n              elsif args.size > 1\n                instance_variable_set(\"@#{attribute}\", args)\n              else\n                instance_variable_set(\"@#{attribute}\", args.first)\n              end\n            end\n            alias_method \"#{attribute}=\", attribute\n          end\n        end\n\n        klass\n      end\n\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/config.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    include ActiveSupport::Configurable\n\n    # Use the same logger as the Active Record one\n    def self.logger\n      ActiveRecord::Base.logger\n    end\n\n    # Allow nested configurations\n    # :TODO: Rely on +inheritable_copy+ to make nested configurations\n    config.define_singleton_method(:nested) do |name, &block|\n      klass = Class.new(ActiveSupport::Configurable::Configuration).new\n      block.call(klass) if block\n      send(\"#{name}=\", klass)\n    end\n\n    # Set if any information that requires querying and searching or collecting\n    # information should be eager loaded. This automatically changes when rails\n    # same configuration is set to true\n    config.eager_load = false\n\n    # Add support for joining any query/association with a generated series\n    config.join_series = true\n\n    # Add support for querying and calculating histogram buckets\n    config.buckets = true\n\n    # Set a list of irregular model name when associated with table names\n    config.irregular_models = {}\n    def config.irregular_models=(hash)\n      PostgreSQL.config[:irregular_models] = hash.map do |(table, model)|\n        [table.to_s, model.to_s]\n      end.to_h\n    end\n\n    # Configure associations features\n    config.nested(:associations) do |assoc|\n\n      # Define if +belongs_to_many+ associations are marked as required by\n      # default. 
False means that no validation will be performed\n      assoc.belongs_to_many_required_by_default = false\n\n      # Although +belongs_to_many+ does not need a custom handler when joining\n      # the last chain scope, this can allow devs to pick which way they prefer:\n      # Rails default, or ANY with a single bind to improve prepared statements\n      # assoc.optimize_for_binds = false TODO: Add support\n\n    end\n\n    # Configure multiple schemas\n    config.nested(:schemas) do |schemas|\n\n      # Enables schemas handler by this gem, not Rails's own implementation\n      schemas.enabled = true\n\n      # Defines a list of LIKE-based schemas to not consider for a multiple\n      # schema database\n      schemas.blacklist = %w[information_schema pg_%]\n\n      # Defines a list of LIKE-based schemas to consider for a multiple schema\n      # database\n      schemas.whitelist = %w[public]\n\n    end\n\n    # Configure auxiliary statement features\n    config.nested(:auxiliary_statement) do |cte|\n\n      # Enables auxiliary statements handler by this gem, not Rails's own\n      # implementation\n      cte.enabled = true\n\n      # Define the key that is used on auxiliary statements to send extra\n      # arguments to format string or send on a proc\n      cte.send_arguments_key = :args\n\n      # Stipulate a class name (which may contain namespace) that exposes the\n      # auxiliary statement in order to perform detached CTEs\n      cte.exposed_class = 'TorqueCTE'\n\n      # Stipulate a class name (which may contain namespace) that exposes the\n      # recursive auxiliary statement in order to perform detached CTEs\n      cte.exposed_recursive_class = 'TorqueRecursiveCTE'\n\n    end\n\n    # Configure ENUM features\n    config.nested(:enum) do |enum|\n\n      # Enables enum handler by this gem, not Rails's own implementation\n      enum.enabled = true\n\n      # The name of the method to be used on any ActiveRecord::Base to\n      # initialize model-based 
enum features\n      enum.base_method = :torque_enum\n\n      # The name of the method to be used on any ActiveRecord::Base to\n      # initialize model-based enum set features\n      enum.set_method = :torque_enum_set\n\n      # Indicates if bang methods like 'disabled!' should update the record on\n      # database or not\n      enum.save_on_bang = true\n\n      # Indicates if it should raise errors when a generated method would\n      # conflict with an existing one\n      enum.raise_conflicting = false\n\n      # Specify the namespace of each enum type of value\n      enum.namespace = nil\n\n      # Specify the scopes for I18n translations\n      enum.i18n_scopes = [\n        'activerecord.attributes.%{model}.%{attr}.%{value}',\n        'activerecord.attributes.%{attr}.%{value}',\n        'activerecord.enums.%{type}.%{value}',\n        'enum.%{type}.%{value}',\n        'enum.%{value}'\n      ]\n\n      # Specify the scopes for I18n translations but with type only\n      enum.i18n_type_scopes = Enumerator.new do |yielder|\n        enum.i18n_scopes.each do |key|\n          next if key.include?('%{model}') || key.include?('%{attr}')\n          yielder << key\n        end\n      end\n\n    end\n\n    # Configure geometry data types\n    config.nested(:geometry) do |geometry|\n\n      # Enables geometry handler by this gem, not Rails's own implementation\n      geometry.enabled = true\n\n      # Define the class that will be handling Point data types after decoding\n      # it. Any class provided here must respond to 'x', and 'y'\n      geometry.point_class = ActiveRecord::Point\n\n      # Define the class that will be handling Box data types after decoding it.\n      # Any class provided here must respond to 'x1', 'y1', 'x2', and 'y2'\n      geometry.box_class = nil\n\n      # Define the class that will be handling Circle data types after decoding\n      # it. 
Any class provided here must respond to 'x', 'y', and 'r'\n      geometry.circle_class = nil\n\n      # Define the class that will be handling Line data types after decoding\n      # it. Any class provided here must respond to 'a', 'b', and 'c'\n      geometry.line_class = nil\n\n      # Define the class that will be handling Segment data types after decoding\n      # it. Any class provided here must respond to 'x1', 'y1', 'x2', and 'y2'\n      geometry.segment_class = nil\n\n    end\n\n    # Configure inheritance features\n    config.nested(:inheritance) do |inheritance|\n\n      # Define the lookup of models from their given name to be inverted, which\n      # means that they are going to be from the last namespaced one to the\n      # most namespaced one\n      inheritance.inverse_lookup = true\n\n      # Determines the name of the column used to collect the table of each\n      # record. When the table has inheritance tables, this column will return\n      # the name of the table that actually holds the record\n      inheritance.record_class_column_name = :_record_class\n\n      # Determines the name of the column used when identifying that the loaded\n      # records should be casted to its correct model. 
This will be TRUE for\n      # the records mentioned on `cast_records`\n      inheritance.auto_cast_column_name = :_auto_cast\n\n    end\n\n    # Configure period features\n    config.nested(:period) do |period|\n\n      # Enables period handler by this gem\n      period.enabled = true\n\n      # The name of the method to be used on any ActiveRecord::Base to\n      # initialize model-based period features\n      period.base_method = :period_for\n\n      # The default name for a threshold attribute, which will automatically\n      # enable threshold features\n      period.auto_threshold = :threshold\n\n      # Define the list of methods that will be created by default while setting\n      # up a new period field\n      period.method_names = {\n        current_on:            '%s_on',                       # 00\n        current:               'current_%s',                  # 01\n        not_current:           'not_current_%s',              # 02\n        containing:            '%s_containing',               # 03\n        not_containing:        '%s_not_containing',           # 04\n        overlapping:           '%s_overlapping',              # 05\n        not_overlapping:       '%s_not_overlapping',          # 06\n        starting_after:        '%s_starting_after',           # 07\n        starting_before:       '%s_starting_before',          # 08\n        finishing_after:       '%s_finishing_after',          # 09\n        finishing_before:      '%s_finishing_before',         # 10\n\n        real_containing:       '%s_real_containing',          # 11\n        real_overlapping:      '%s_real_overlapping',         # 12\n        real_starting_after:   '%s_real_starting_after',      # 13\n        real_starting_before:  '%s_real_starting_before',     # 14\n        real_finishing_after:  '%s_real_finishing_after',     # 15\n        real_finishing_before: '%s_real_finishing_before',    # 16\n\n        containing_date:       '%s_containing_date',          # 17\n        
not_containing_date:   '%s_not_containing_date',      # 18\n        overlapping_date:      '%s_overlapping_date',         # 19\n        not_overlapping_date:  '%s_not_overlapping_date',     # 20\n        real_containing_date:  '%s_real_containing_date',     # 21\n        real_overlapping_date: '%s_real_overlapping_date',    # 22\n\n        current?:              'current_%s?',                 # 23\n        current_on?:           'current_%s_on?',              # 24\n        start:                 '%s_start',                    # 25\n        finish:                '%s_finish',                   # 26\n        real:                  'real_%s',                     # 27\n        real_start:            '%s_real_start',               # 28\n        real_finish:           '%s_real_finish',              # 29\n      }\n\n      # If the period is marked as direct access, without the field name,\n      # then these method names will replace the default ones\n      period.direct_method_names = {\n        current_on:          'happening_in',\n        containing:          'during',\n        not_containing:      'not_during',\n        real_containing:     'real_during',\n\n        containing_date:     'during_date',\n        not_containing_date: 'not_during_date',\n\n        current_on?:         'happening_in?',\n        start:               'start_at',\n        finish:              'finish_at',\n        real:                'real_time',\n        real_start:          'real_start_at',\n        real_finish:         'real_finish_at',\n      }\n\n    end\n\n    # Configure interval features\n    config.nested(:interval) do |interval|\n\n      # Enables interval handler by this gem, not Rails's own implementation\n      interval.enabled = true\n\n    end\n\n    # Configure arel additional features\n    config.nested(:arel) do |arel|\n\n      # When provided, the initializer will expose the Arel function helper on\n      # the given module\n      config.expose_function_helper_on = nil\n\n   
   # List of Arel INFIX operators that will be made available for using as\n      # methods on Arel::Nodes::Node and Arel::Attribute\n      arel.infix_operators = {\n        'contained_by'        => '<@',\n        'has_key'             => '?',\n        'has_all_keys'        => '?&',\n        'has_any_keys'        => '?|',\n        'strictly_left'       => '<<',\n        'strictly_right'      => '>>',\n        'doesnt_right_extend' => '&<',\n        'doesnt_left_extend'  => '&>',\n        'adjacent_to'         => '-|-',\n      }\n\n    end\n\n    # Configure full text search features\n    config.nested(:full_text_search) do |fts|\n\n      # Enables full text search handler by this gem\n      fts.enabled = true\n\n      # The name of the method to be used on any ActiveRecord::Base to\n      # initialize model-based full text search features\n      fts.base_method = :torque_search_for\n\n      # Defines the default language when generating search vector columns\n      fts.default_language = 'english'\n\n      # Defines the default mode to be used when generating full text search\n      # queries. It can be one of the following:\n      #   - :default (to_tsquery)\n      #   - :phrase (phraseto_tsquery)\n      #   - :plain (plainto_tsquery)\n      #   - :web (websearch_to_tsquery)\n      fts.default_mode = :phrase\n\n      # Defines the default index type to be used when creating search vector.\n      # It still requires that the column requests an index\n      fts.default_index_type = :gin\n\n    end\n\n    # Configure predicate builder additional features\n    config.nested(:predicate_builder) do |builder|\n\n      # List which handlers are enabled by default\n      builder.enabled = %i[regexp arel_attribute enumerator_lazy]\n\n      # When active, values provided to array attributes will be handled more\n      # friendly. 
It will use the +ANY+ operator on a equality check and\n      # overlaps when the given value is an array\n      builder.handle_array_attributes = false\n\n      # Make sure that the predicate builder will not spend more than 20ms\n      # trying to produce the underlying array\n      builder.lazy_timeout = 0.02\n\n      # Since lazy array is uncommon, it is better to limit the number of\n      # entries we try to pull so we don't cause a timeout or a long wait\n      # iteration\n      builder.lazy_limit = 2_000\n\n    end\n\n    # Configure versioned commands features\n    config.nested(:versioned_commands) do |vs|\n\n      # This is a feature that developers must explicitly opt-in. It is designed\n      # in a way that prevents a large impact on Rails' original migrations\n      # behavior. But, it is still a feature that everyone may not need, and\n      # some may complain about the additional schema table, which also uses\n      # inheritance\n      vs.enabled = false\n\n      # Define the list of commands that are going to be versioned by this\n      # method\n      vs.types = %i[function type view]\n\n      # The name of the table that will inherit from +schema_migrations+ and\n      # store the list of versioned commands that have been executed\n      vs.table_name = 'schema_versioned_commands'\n\n    end\n\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/function.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    # Simplified module for creating arel functions. This is used internally\n    # but can also be made available to other devs on their own projects\n    module Function\n      class << self\n\n        # A facilitator to create a bind param that is fully compatible with\n        # Arel and ActiveRecord\n        def bind(*args)\n          attr = ::ActiveRecord::Relation::QueryAttribute.new(*args)\n          ::Arel::Nodes::BindParam.new(attr)\n        end\n\n        # Just a shortcut to create a bind param for a model attribute and a\n        # value for it\n        def bind_for(model, attribute, value)\n          bind(attribute, value, model.attribute_types[attribute])\n        end\n\n        # Another shortcut, when we already have the arel attribute at hand\n        def bind_with(arel_attribute, value)\n          bind(arel_attribute.name, value, arel_attribute.type_caster)\n        end\n\n        # A facilitator to create a bind param with a specific type\n        def bind_type(value, type = nil, name: 'value', cast: nil)\n          type ||= ruby_type_to_model_type(value)\n          type = ActiveModel::Type.lookup(type) if type.is_a?(Symbol)\n          result = bind(name, value, type)\n          cast ? result.pg_cast(cast) : result\n        end\n\n        # A facilitator to create an infix operation\n        def infix(op, left, right)\n          ::Arel::Nodes::InfixOperation.new(op, left, right)\n        end\n\n        # A facilitator to use several Infix operators to concatenate all the\n        # provided arguments. 
Arguments won't be sanitized, as other methods\n        # under this module\n        def concat(*args)\n          return args.first if args.one?\n          args.reduce { |left, right| infix(:\"||\", left, right) }\n        end\n\n        # A simple helper to trick Rails into producing the right SQL for\n        # grouping operations\n        def group_by(arel, name)\n          Arel::Nodes::Ref.new(name.to_s, arel)\n        end\n\n        # As of now, this indicates that it supports any direct calls, since\n        # the idea is to simply map to an Arel function with the same name,\n        # without checking if it actually exists\n        def respond_to_missing?(*)\n          true\n        end\n\n        # This method is used to catch any method calls that are not defined\n        # in this module. It will simply return an Arel function with the same\n        # name as the method called, passing all arguments to it, without\n        # any sanitization\n        def method_missing(name, *args, &block)\n          ::Arel::Nodes::NamedFunction.new(name.to_s.upcase, args)\n        end\n\n        private\n\n          def ruby_type_to_model_type(value)\n            case value\n            when Integer then :integer\n            when Float then :float\n            when String then :string\n            when Time, ActiveSupport::TimeWithZone then :time\n            when TrueClass, FalseClass then :boolean\n            when DateTime then :datetime\n            when Date then :date\n            when BigDecimal then :decimal\n            when ActiveSupport::Duration\n              Adapter::OID::Interval.new\n            else\n              raise ArgumentError, \"Cannot infer type from value: #{value.inspect}.\"\n            end\n          end\n\n      end\n    end\n\n    FN = Function\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/geometry_builder.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    class GeometryBuilder < ActiveModel::Type::Value\n\n      DESTRUCTOR = /[<>{}()]/.freeze\n      NUMBER_SERIALIZER = ->(num) { num.to_s.gsub(/\\.0$/, '') }\n\n      def type\n        return self.class.const_get('TYPE') if self.class.const_defined?('TYPE')\n        self.class.const_set('TYPE', self.class.name.demodulize.underscore.to_sym)\n      end\n\n      def pieces\n        self.class.const_get('PIECES')\n      end\n\n      def formation\n        self.class.const_get('FORMATION')\n      end\n\n      def cast(value)\n        case value\n        when ::String\n          return if value.blank?\n          value.gsub!(DESTRUCTOR, '')\n          build_klass(*value.split(','))\n        when ::Hash\n          build_klass(*value.symbolize_keys.slice(*pieces).values)\n        when ::Array\n          build_klass(*(value.flatten))\n        else\n          value\n        end\n      end\n\n      def serialize(value)\n        parts =\n          case value\n          when config_class\n            pieces.map { |piece| value.public_send(piece) }\n          when ::Hash\n            value.symbolize_keys.slice(*pieces).values\n          when ::Array\n            value.flatten\n          end\n\n        parts = parts&.compact&.flatten\n        return if parts.blank?\n\n        raise 'Invalid format' if parts.size < pieces.size\n        format(formation, *parts.first(pieces.size).map(&number_serializer))\n      end\n\n      def deserialize(value)\n        build_klass(*value.gsub(DESTRUCTOR, '').split(',')) unless value.nil?\n      end\n\n      def type_cast_for_schema(value)\n        if config_class === value\n          pieces.map { |piece| value.public_send(piece) }\n        else\n          super\n        end\n      end\n\n      def changed_in_place?(raw_old_value, new_value)\n        raw_old_value != serialize(new_value)\n      end\n\n      protected\n\n        def number_serializer\n          
self.class.const_get('NUMBER_SERIALIZER')\n        end\n\n        def config_class\n          Torque::PostgreSQL.config.geometry.public_send(\"#{type}_class\")\n        end\n\n        def build_klass(*args)\n          return nil if args.empty?\n          check_invalid_format!(args)\n\n          config_class.new(*args.try(:first, pieces.size)&.map(&:to_f))\n        end\n\n        def check_invalid_format!(args)\n          raise 'Invalid format' if args.size < pieces.size\n        end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/i18n.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module I18n\n\n      # Adds extra support to localize durations\n      # This is a temporary solution, since 3600.seconds does not translate into\n      # 1 hour\n      def localize(locale, object, format = :default, options = {})\n        return super unless object.is_a?(ActiveSupport::Duration)\n        object.inspect\n      end\n\n    end\n\n    ::I18n::Backend::Base.prepend I18n\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/inheritance.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    InheritanceError = Class.new(ArgumentError)\n\n    module Inheritance\n      extend ActiveSupport::Concern\n\n      # Cast the given object to its correct class\n      def cast_record\n        record_class_value = send(self.class._record_class_attribute)\n\n        return self unless self.class.table_name != record_class_value\n        klass = self.class.casted_dependents[record_class_value]\n        self.class.raise_unable_to_cast(record_class_value) if klass.nil?\n\n        # The record need to be re-queried to have its attributes loaded\n        # :TODO: Improve this by only loading the necessary extra columns\n        klass.find(self.id)\n      end\n\n      class_methods do\n        delegate :_auto_cast_attribute, :_record_class_attribute, to: ActiveRecord::Relation\n\n        # Get a full list of all attributes from a model and all its dependents\n        def inheritance_merged_attributes\n          @inheritance_merged_attributes ||= begin\n            children = casted_dependents.values.flat_map(&:attribute_names)\n            attribute_names.to_set.merge(children).to_a.freeze\n          end\n        end\n\n        # Get the list of attributes that can be merged while querying because\n        # they all have the same type\n        def inheritance_mergeable_attributes\n          @inheritance_mergeable_attributes ||= begin\n            base = inheritance_merged_attributes - attribute_names\n            types = base.zip(base.size.times.map { [] }).to_h\n\n            casted_dependents.values.each do |klass|\n              klass.attribute_types.each do |column, type|\n                types[column]&.push(type)\n              end\n            end\n\n            result = types.filter_map do |attribute, types|\n              attribute if types.each_with_object(types.shift).all?(&:==)\n            end\n\n            (attribute_names + result).freeze\n          end\n        
end\n\n        # Check if the model's table depends on any inheritance\n        def physically_inherited?\n          return @physically_inherited if defined?(@physically_inherited)\n\n          @physically_inherited = connection.schema_cache.dependencies(\n            defined?(@table_name) ? @table_name : decorated_table_name,\n          ).present?\n        rescue ActiveRecord::ConnectionNotEstablished\n          false\n        end\n\n        # Get the list of all tables directly or indirectly dependent of the\n        # current one\n        def inheritance_dependents\n          connection.schema_cache.associations(table_name) || []\n        end\n\n        # Check whether the model's table has directly or indirectly dependents\n        def physically_inheritances?\n          inheritance_dependents.present?\n        end\n\n        # Get the list of all ActiveRecord classes directly or indirectly\n        # associated by inheritance\n        def casted_dependents\n          @casted_dependents ||= inheritance_dependents.map do |table_name|\n            [table_name, connection.schema_cache.lookup_model(table_name)]\n          end.to_h\n        end\n\n        # Manually set the model name associated with tables name in order to\n        # facilitates the identification of inherited records\n        def reset_table_name\n          table = super\n\n          adapter = ActiveRecord::ConnectionAdapters::PostgreSQLAdapter\n          if Torque::PostgreSQL.config.eager_load && connection.is_a?(adapter)\n            connection.schema_cache.add_model_name(table, self)\n          end\n\n          table\n        end\n\n        # Get the final decorated table, regardless of any special condition\n        def decorated_table_name\n          parent_class = try(:module_parent) || try(:parent)\n          if parent_class < Base && !parent_class.abstract_class?\n            contained = parent_class.table_name\n            contained = contained.singularize if 
parent_class.pluralize_table_names\n            contained += \"_\"\n          end\n\n          \"#{full_table_name_prefix}#{contained}#{undecorated_table_name(name)}#{full_table_name_suffix}\"\n        end\n\n        # For all main purposes, physical inherited classes should have\n        # base_class as their own\n        def base_class\n          physically_inherited? ? self : super\n        end\n\n        # Primary key is one exception when getting information about the class,\n        # it must returns the superclass PK\n        def primary_key\n          physically_inherited? ? superclass.primary_key : super\n        end\n\n        # Add an additional check to return the name of the table even when the\n        # class is inherited, but only if it is a physical inheritance\n        def compute_table_name\n          physically_inherited? ? decorated_table_name : super\n        end\n\n        # Raises an error message saying that the giver record class was not\n        # able to be casted since the model was not identified\n        def raise_unable_to_cast(record_class_value)\n          raise InheritanceError.new(<<~MSG.squish)\n            An record was not able to be casted to type '#{record_class_value}'.\n            If this table name doesn't represent a guessable model,\n            please use 'Torque::PostgreSQL.conf.irregular_models =\n            { '#{record_class_value}' => 'ModelName' }'.\n          MSG\n        end\n\n        private\n\n          # If the class is physically inherited, the klass needs to be properly\n          # changed before moving forward\n          def instantiate_instance_of(klass, attributes, types = {}, &block)\n            return super unless klass.physically_inheritances?\n\n            real_class = torque_discriminate_class_for_record(klass, attributes)\n            return super if real_class.nil?\n\n            attributes, types = sanitize_attributes(real_class, attributes, types)\n            super(real_class, attributes, 
types, &block)\n          end\n\n          # Unwrap the attributes and column types from the given class when\n          # there are unmergeable attributes\n          def sanitize_attributes(real_class, attributes, types)\n            skip = (inheritance_merged_attributes - real_class.attribute_names).to_set\n            skip.merge(real_class.attribute_names - inheritance_mergeable_attributes)\n            return [attributes, types] if skip.empty?\n\n            dropped = 0\n            new_types = {}\n\n            row = attributes.instance_variable_get(:@row).dup\n            indexes = attributes.instance_variable_get(:@column_indexes).dup\n            indexes = indexes.each_with_object({}) do |(column, index), new_indexes|\n              attribute, prefix = column.split('__', 2).reverse\n              current_index = index - dropped\n\n              if prefix != table_name && skip.include?(attribute)\n                row.delete_at(current_index)\n                dropped += 1\n              else\n                new_types.merge!(types.slice(attribute))\n                new_types[current_index] = types[index]\n                new_indexes[attribute] = current_index\n              end\n            end\n\n            [ActiveRecord::Result::IndexedRow.new(indexes, row), new_types]\n          end\n\n          # Get the real class when handling physical inheritances and casting\n          # the record when existing properly is present\n          def torque_discriminate_class_for_record(klass, record)\n            return if record[_auto_cast_attribute.to_s] == false\n\n            embedded_type = record[_record_class_attribute.to_s]\n            return if embedded_type.blank? || embedded_type == table_name\n\n            casted_dependents[embedded_type] || raise_unable_to_cast(embedded_type)\n          end\n      end\n    end\n\n    ActiveRecord::Base.include Inheritance\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/insert_all.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module InsertAll\n      attr_reader :where\n\n      def initialize(*args, where: nil, **xargs)\n        super(*args, **xargs)\n\n        @where = where\n      end\n    end\n\n    module InsertAll::Builder\n      delegate :where, to: :insert_all\n\n      def where_condition?\n        !where.nil?\n      end\n    end\n\n    ActiveRecord::InsertAll.prepend InsertAll\n    ActiveRecord::InsertAll::Builder.include InsertAll::Builder\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/migration/command_recorder.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Migration\n      module CommandRecorder\n\n        # Records the rename operation for types\n        def rename_type(*args, &block)\n          record(:rename_type, args, &block)\n        end\n\n        # Inverts the type rename operation\n        def invert_rename_type(args)\n          [:rename_type, args.reverse]\n        end\n\n        # Records the creation of a schema\n        def create_schema(*args, &block)\n          record(:create_schema, args, &block)\n        end\n\n        # Inverts the creation of a schema\n        def invert_create_schema(args)\n          [:drop_schema, [args.first]]\n        end\n\n      end\n\n      ActiveRecord::Migration::CommandRecorder.include CommandRecorder\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/migration.rb",
    "content": "require_relative 'migration/command_recorder'\n"
  },
  {
    "path": "lib/torque/postgresql/predicate_builder/arel_attribute_handler.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module PredicateBuilder\n      class ArelAttributeHandler\n        # Shortcut\n        def self.call(*args)\n          new.call(*args)\n        end\n\n        def initialize(*)\n          # There is no need to use or save the predicate builder here\n        end\n\n        def call(attribute, value)\n          case\n          when array_typed?(attribute) && array_typed?(value) then attribute.overlaps(value)\n          when array_typed?(attribute) then value.eq(FN.any(attribute))\n          when array_typed?(value) then attribute.eq(FN.any(value))\n          else attribute.eq(value)\n          end\n        end\n\n        private\n\n          def array_typed?(attribute)\n            attribute.able_to_type_cast? && attribute.type_caster.is_a?(ARRAY_OID)\n          end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/predicate_builder/array_handler.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module PredicateBuilder\n      module ArrayHandler\n        def call(attribute, value)\n          return super unless array_attribute?(attribute) &&\n            PostgreSQL.config.predicate_builder.handle_array_attributes\n\n          call_for_array(attribute, value)\n        end\n\n        def call_for_array(attribute, value)\n          if !value.is_a?(::Array)\n            call_with_value(attribute, value)\n          elsif value.any?\n            call_with_array(attribute, value)\n          else\n            call_with_empty(attribute)\n          end\n        end\n\n        private\n\n          def call_with_value(attribute, value)\n            FN.infix(:\"=\", FN.bind_with(attribute, value), FN.any(attribute))\n          end\n\n          def call_with_array(attribute, value)\n            attribute.overlaps(FN.bind_with(attribute, value))\n          end\n\n          def call_with_empty(attribute)\n            FN.cardinality(attribute).eq(0)\n          end\n\n          def array_attribute?(attribute)\n            attribute.type_caster.is_a?(ARRAY_OID)\n          end\n      end\n\n      ::ActiveRecord::PredicateBuilder::ArrayHandler.prepend(ArrayHandler)\n      ::ActiveRecord::PredicateBuilder::BasicObjectHandler.prepend(ArrayHandler)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/predicate_builder/enumerator_lazy_handler.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module PredicateBuilder\n      class EnumeratorLazyHandler < ::ActiveRecord::PredicateBuilder::ArrayHandler\n        Timeout = Class.new(::Timeout::Error)\n\n        def call(attribute, value)\n          with_timeout do\n            super(attribute, limit.nil? ? value.force : value.first(limit))\n          end\n        end\n\n        private\n\n          def with_timeout\n            return yield if timeout.nil?\n\n            begin\n              ::Timeout.timeout(timeout) { yield }\n            rescue ::Timeout::Error\n              raise Timeout, \"Lazy predicate builder timed out after #{timeout} seconds\"\n            end\n          end\n\n          def timeout\n            PostgreSQL.config.predicate_builder.lazy_timeout\n          end\n\n          def limit\n            PostgreSQL.config.predicate_builder.lazy_limit\n          end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/predicate_builder/regexp_handler.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module PredicateBuilder\n      class RegexpHandler\n        def initialize(predicate_builder)\n          @predicate_builder = predicate_builder\n        end\n\n        def call(attribute, value)\n          operator = value.casefold? ? :\"~*\" : :\"~\"\n          FN.infix(operator, attribute, FN.bind_with(attribute, value.source))\n        end\n\n        private\n          attr_reader :predicate_builder\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/predicate_builder.rb",
    "content": "# frozen_string_literal: true\n\nrequire_relative 'predicate_builder/array_handler'\n\nrequire_relative 'predicate_builder/regexp_handler'\nrequire_relative 'predicate_builder/arel_attribute_handler'\nrequire_relative 'predicate_builder/enumerator_lazy_handler'\n\nmodule Torque\n  module PostgreSQL\n    module PredicateBuilder\n      ARRAY_OID = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Array\n\n      def initialize(*)\n        super\n\n        handlers = Array.wrap(PostgreSQL.config.predicate_builder.enabled).inquiry\n\n        if handlers.regexp?\n          register_handler(Regexp, RegexpHandler.new(self))\n        end\n\n        if handlers.enumerator_lazy?\n          register_handler(Enumerator::Lazy, EnumeratorLazyHandler.new(self))\n        end\n\n        if handlers.arel_attribute?\n          register_handler(::Arel::Attributes::Attribute, ArelAttributeHandler.new(self))\n        end\n      end\n    end\n\n    ::ActiveRecord::PredicateBuilder.prepend(PredicateBuilder)\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/railtie.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    # = Torque PostgreSQL Railtie\n    class Railtie < Rails::Railtie # :nodoc:\n      # Get information from the running rails app\n      initializer 'torque-postgresql' do |app|\n        ActiveSupport.on_load(:active_record_postgresqladapter) do\n          ActiveSupport.on_load(:active_record) do\n            torque_config = Torque::PostgreSQL.config\n            torque_config.eager_load = app.config.eager_load\n\n            # TODO: Only load files that have their features enabled, like CTE\n\n            ar_type = ActiveRecord::Type\n\n            # Setup belongs_to_many association\n            ActiveRecord::Base.belongs_to_many_required_by_default =\n              torque_config.associations.belongs_to_many_required_by_default\n\n            ## General features\n            if torque_config.join_series\n              require_relative 'relation/join_series'\n              Relation.include(Relation::JoinSeries)\n            end\n\n            if torque_config.buckets\n              require_relative 'relation/buckets'\n              Relation.include(Relation::Buckets)\n            end\n\n            ## Schemas Enabled Setup\n            if (config = torque_config.schemas).enabled\n              require_relative 'adapter/schema_overrides'\n            end\n\n            ## CTE Enabled Setup\n            if (config = torque_config.auxiliary_statement).enabled\n              require_relative 'auxiliary_statement'\n              require_relative 'relation/auxiliary_statement'\n              Relation.include(Relation::AuxiliaryStatement)\n\n              # Define the exposed constant for both types of auxiliary statements\n              if config.exposed_class.present?\n                *ns, name = config.exposed_class.split('::')\n                base = ns.present? ? 
::Object.const_get(ns.join('::')) : ::Object\n                base.const_set(name, AuxiliaryStatement)\n\n                *ns, name = config.exposed_recursive_class.split('::')\n                base = ns.present? ? ::Object.const_get(ns.join('::')) : ::Object\n                base.const_set(name, AuxiliaryStatement::Recursive)\n              end\n            end\n\n            ## Enum Enabled Setup\n            if (config = torque_config.enum).enabled\n              require_relative 'adapter/oid/enum'\n              require_relative 'adapter/oid/enum_set'\n\n              require_relative 'attributes/enum'\n              require_relative 'attributes/enum_set'\n\n              Attributes::Enum.include_on(ActiveRecord::Base)\n              Attributes::EnumSet.include_on(ActiveRecord::Base)\n\n              ar_type.register(:enum,     Adapter::OID::Enum,    adapter: :postgresql)\n              ar_type.register(:enum_set, Adapter::OID::EnumSet, adapter: :postgresql)\n\n              if config.namespace == false\n                # TODO: Allow enum classes to exist without a namespace\n                config.namespace = PostgreSQL.const_set('Enum', Module.new)\n              else\n                config.namespace ||= ::Object.const_set('Enum', Module.new)\n\n                # Define a method to find enumerators based on the namespace\n                config.namespace.define_singleton_method(:const_missing) do |name|\n                  Attributes::Enum.lookup(name)\n                end\n\n                # Define a helper method to get a sample value\n                config.namespace.define_singleton_method(:sample) do |name|\n                  Attributes::Enum.lookup(name).sample\n                end\n              end\n            end\n\n            ## Geometry Enabled Setup\n            if (config = torque_config.geometry).enabled\n              require_relative 'adapter/oid/box'\n              require_relative 'adapter/oid/circle'\n              require_relative 
'adapter/oid/line'\n              require_relative 'adapter/oid/segment'\n\n              ar_type.register(:box,     Adapter::OID::Box,     adapter: :postgresql)\n              ar_type.register(:circle,  Adapter::OID::Circle,  adapter: :postgresql)\n              ar_type.register(:line,    Adapter::OID::Line,    adapter: :postgresql)\n              ar_type.register(:segment, Adapter::OID::Segment, adapter: :postgresql)\n            end\n\n            ## Period Enabled Setup\n            if (config = torque_config.period).enabled\n              require_relative 'attributes/period'\n              Attributes::Period.include_on(ActiveRecord::Base)\n            end\n\n            ## Interval Enabled Setup\n            if (config = torque_config.interval).enabled\n              require_relative 'adapter/oid/interval'\n              ar_type.register(:interval, Adapter::OID::Interval, adapter: :postgresql)\n            end\n\n            ## Full Text Search Enabled Setup\n            if (config = torque_config.full_text_search).enabled\n              require_relative 'attributes/full_text_search'\n              Attributes::FullTextSearch.include_on(ActiveRecord::Base)\n            end\n\n            ## Arel Setup\n            PostgreSQL::Arel.build_operations(torque_config.arel.infix_operators)\n            if (mod = torque_config.arel.expose_function_helper_on&.to_s)\n              parent, _, name = mod.rpartition('::')\n              parent = parent ? parent.constantize : ::Object\n\n              raise ArgumentError, <<~MSG.squish if parent.const_defined?(name)\n                Unable to expose Arel function helper on #{mod} because the constant\n                #{name} is already defined on #{parent}. 
Please choose a different name.\n              MSG\n\n              parent.const_set(name, PostgreSQL::FN)\n            end\n\n            ## Versioned Commands Setup\n            if (config = torque_config.versioned_commands).enabled\n              require_relative 'versioned_commands'\n\n              ActiveRecord::Schema::Definition.include(Adapter::Definition)\n            end\n\n            # Make sure to load all the types that are handled by this gem on\n            # each individual PG connection\n            adapter = ActiveRecord::ConnectionAdapters::PostgreSQLAdapter\n            ActiveRecord::Base.connection_handler.each_connection_pool do |pool|\n              next unless pool.db_config.adapter_class.is_a?(adapter)\n\n              pool.with_connection { |conn| conn.torque_load_additional_types }\n            end\n          end\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/reflection/abstract_reflection.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Reflection\n      module AbstractReflection\n        AREL_ATTR = ::Arel::Attributes::Attribute\n        AREL_NODE = ::Arel::Nodes::Node\n\n        # Check if the foreign key actually exists\n        def connected_through_array?\n          false\n        end\n\n        # Connection through an array-like attribute is more complex then just\n        # a simple eq. This needs to go through the channel that handles larger\n        # situations\n        def join_scope(table, foreign_table, foreign_klass)\n          return super unless connected_through_array?\n\n          table_md = ActiveRecord::TableMetadata.new(klass, table)\n          predicate_builder = klass.predicate_builder.with(table_md)\n          scope_chain_items = join_scopes(table, predicate_builder)\n          klass_scope       = klass_join_scope(table, predicate_builder)\n\n          klass_scope.where!(build_id_constraint_between(table, foreign_table))\n          scope_chain_items.inject(klass_scope, &:merge!)\n        end\n\n        # Manually build the join constraint\n        def build_join_constraint(table, foreign_table)\n          result = build_id_constraint_between(table, foreign_table)\n          result = table.create_and([result, klass.send(:type_condition, table)]) \\\n            if klass.finder_needs_type_condition?\n\n          result\n        end\n\n        private\n\n          # This one is a lot simpler, now that we have a predicate builder that\n          # knows exactly what to do with 2 array-like attributes\n          def build_id_constraint_between(table, foreign_table)\n            PredicateBuilder::ArelAttributeHandler.call(\n              table[join_primary_key],\n              foreign_table[join_foreign_key],\n            )\n          end\n      end\n\n      ::ActiveRecord::Reflection::AbstractReflection.prepend(AbstractReflection)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/reflection/association_reflection.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Reflection\n      module AssociationReflection\n\n        def initialize(name, scope, options, active_record)\n          super\n\n          raise ArgumentError, <<-MSG.squish if options[:array] && options[:polymorphic]\n            Associations can't be connected through an array at the same time they are\n            polymorphic. Please choose one of the options.\n          MSG\n        end\n\n        private\n\n          # Check if the foreign key should be pluralized\n          def derive_foreign_key(*, **)\n            result = super\n            result = ActiveSupport::Inflector.pluralize(result) \\\n              if collection? && connected_through_array?\n            result\n          end\n\n          # returns either +nil+ or the inverse association name that it finds.\n          def automatic_inverse_of\n            return super unless connected_through_array?\n\n            if can_find_inverse_of_automatically?(self)\n              inverse_name = options[:as] || active_record.name.demodulize\n              inverse_name = ActiveSupport::Inflector.underscore(inverse_name)\n              inverse_name = ActiveSupport::Inflector.pluralize(inverse_name)\n              inverse_name = inverse_name.to_sym\n\n              begin\n                reflection = klass._reflect_on_association(inverse_name)\n              rescue NameError\n                # Give up: we couldn't compute the klass type so we won't be able\n                # to find any associations either.\n                reflection = false\n              end\n\n              return inverse_name if valid_inverse_reflection?(reflection)\n            end\n          end\n\n      end\n\n      ::ActiveRecord::Reflection::AssociationReflection.prepend(AssociationReflection)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/reflection/belongs_to_many_reflection.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Reflection\n      class BelongsToManyReflection < ::ActiveRecord::Reflection::AssociationReflection\n        def macro\n          :belongs_to_many\n        end\n\n        def connected_through_array?\n          true\n        end\n\n        def belongs_to?\n          true\n        end\n\n        def collection?\n          true\n        end\n\n        def association_class\n          Associations::BelongsToManyAssociation\n        end\n\n        def foreign_key\n          @foreign_key ||= options[:foreign_key]&.to_s || derive_foreign_key.freeze\n        end\n\n        def association_foreign_key\n          @association_foreign_key ||= foreign_key\n        end\n\n        def active_record_primary_key\n          @active_record_primary_key ||= options[:primary_key]&.to_s || derive_primary_key\n        end\n\n        def join_primary_key(*)\n          active_record_primary_key\n        end\n\n        def join_foreign_key\n          foreign_key\n        end\n\n        def array_attribute\n          active_record.arel_table[foreign_key]\n        end\n\n        private\n\n          def derive_primary_key\n            klass.primary_key\n          end\n\n          def derive_foreign_key\n            \"#{name.to_s.singularize}_ids\"\n          end\n      end\n\n      ::ActiveRecord::Reflection.const_set(:BelongsToManyReflection, BelongsToManyReflection)\n\n      reflection_class = ::ActiveRecord::Reflection::AssociationReflection\n      reflection_class::VALID_AUTOMATIC_INVERSE_MACROS.push(:belongs_to_many) \\\n        if reflection_class.const_defined?('VALID_AUTOMATIC_INVERSE_MACROS')\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/reflection/has_many_reflection.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Reflection\n      module HasManyReflection\n        def connected_through_array?\n          options[:array]\n        end\n\n        def array_attribute\n          klass.arel_table[foreign_key]\n        end\n      end\n\n      ::ActiveRecord::Reflection::HasManyReflection.include(HasManyReflection)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/reflection/runtime_reflection.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Reflection\n      module RuntimeReflection\n        delegate :klass, :active_record, :connected_through_array?, :macro, :name,\n          :array_attribute, to: :@reflection\n      end\n\n      ::ActiveRecord::Reflection::RuntimeReflection.include(RuntimeReflection)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/reflection/through_reflection.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Reflection\n      module ThroughReflection\n        delegate :build_id_constraint, :connected_through_array?, to: :source_reflection\n      end\n\n      ::ActiveRecord::Reflection::ThroughReflection.include(ThroughReflection)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/reflection.rb",
    "content": "# frozen_string_literal: true\n\nrequire_relative 'reflection/abstract_reflection'\nrequire_relative 'reflection/association_reflection'\nrequire_relative 'reflection/belongs_to_many_reflection'\nrequire_relative 'reflection/has_many_reflection'\nrequire_relative 'reflection/runtime_reflection'\nrequire_relative 'reflection/through_reflection'\n\nmodule Torque\n  module PostgreSQL\n    module Reflection\n\n      def create(macro, name, scope, options, ar)\n        return super unless macro.eql?(:belongs_to_many)\n        BelongsToManyReflection.new(name, scope, options, ar)\n      end\n\n    end\n\n    ::ActiveRecord::Reflection.singleton_class.prepend(Reflection)\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/relation/auxiliary_statement.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Relation\n      module AuxiliaryStatement\n\n        # :nodoc:\n        def auxiliary_statements_values\n          @values.fetch(:auxiliary_statements, FROZEN_EMPTY_ARRAY)\n        end\n        # :nodoc:\n        def auxiliary_statements_values=(value)\n          assert_modifiable!\n          @values[:auxiliary_statements] = value\n        end\n\n        # Set use of an auxiliary statement\n        def with(*args, **settings)\n          spawn.with!(*args, **settings)\n        end\n\n        # Like #with, but modifies relation in place.\n        def with!(*args, **settings)\n          instantiate_auxiliary_statements(*args, **settings)\n          self\n        end\n\n        alias_method :auxiliary_statements, :with\n        alias_method :auxiliary_statements!, :with!\n\n        # Get all auxiliary statements bound attributes and the base bound\n        # attributes as well\n        def bound_attributes\n          visitor = ::Arel::Visitors::PostgreSQL.new(ActiveRecord::Base.connection)\n          visitor.accept(self.arel.ast, ::Arel::Collectors::Composite.new(\n            ::Arel::Collectors::SQLString.new,\n            ::Arel::Collectors::Bind.new,\n          )).value.last\n        end\n\n        private\n\n          # Hook arel build to add the distinct on clause\n          def build_arel(*)\n            arel = super\n            type = auxiliary_statement_type\n            sub_queries = build_auxiliary_statements(arel)\n            sub_queries.nil? ? 
arel : arel.with(*type, *sub_queries)\n          end\n\n          # Instantiate one or more auxiliary statements for the given +klass+\n          def instantiate_auxiliary_statements(*args, **options)\n            klass = PostgreSQL::AuxiliaryStatement\n            klass = klass::Recursive if options.delete(:recursive).present?\n\n            self.auxiliary_statements_values += args.map do |table|\n              if table.is_a?(Class) && table < klass\n                table.new(**options)\n              else\n                klass.instantiate(table, self, **options)\n              end\n            end\n          end\n\n          # Build all necessary data for auxiliary statements\n          def build_auxiliary_statements(arel)\n            return unless auxiliary_statements_values.present?\n            auxiliary_statements_values.map do |klass|\n              klass.build(self).tap { arel.join_sources.concat(klass.join_sources) }\n            end\n          end\n\n          # Return recursive if any auxiliary statement is recursive\n          def auxiliary_statement_type\n            klass = PostgreSQL::AuxiliaryStatement::Recursive\n            :recursive if auxiliary_statements_values.any?(klass)\n          end\n\n          # Throw an error showing that an auxiliary statement of the given\n          # table name isn't defined\n          def auxiliary_statement_error(name)\n            raise ArgumentError, <<-MSG.squish\n              There's no '#{name}' auxiliary statement defined for #{self.class.name}.\n            MSG\n          end\n\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/relation/buckets.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Relation\n      module Buckets\n\n        # :nodoc:\n        def buckets_value\n          @values.fetch(:buckets, nil)\n        end\n        # :nodoc:\n        def buckets_value=(value)\n          assert_modifiable!\n          @values[:buckets] = value\n        end\n\n        # Specifies how to bucket records. It works for both the calculations\n        # or just putting records into groups. For example:\n        #\n        #   User.buckets(:created_at, [1.year.ago, 1.month.ago, 1.week.ago])\n        #   # Returns all users grouped by created_at in the given time ranges\n        #\n        #   User.buckets(:age, 0..100, step: 10).count\n        #   # Counts all users grouped by age buckets of 10 years\n        def buckets(*value, **xargs)\n          spawn.buckets!(*value, **xargs)\n        end\n\n        # Like #buckets, but modifies relation in place.\n        def buckets!(attribute, values, count: nil, cast: nil, as: nil)\n          raise ArgumentError, <<~MSG.squish if !values.is_a?(Array) && !values.is_a?(Range)\n            Buckets must be an array or a range.\n          MSG\n\n          count ||= 1 if values.is_a?(Range)\n          attribute = arel_table[attribute] unless ::Arel.arel_node?(attribute)\n          self.buckets_value = [attribute, values, count, cast, as]\n          self\n        end\n\n        # When performing calculations with buckets, this method add a grouping\n        # clause to the query by the bucket values, and then adjust the keys\n        # to match provided values\n        def calculate(*)\n          return super if buckets_value.blank?\n\n          raise ArgumentError, <<~MSG.squish if group_values.present?\n            Cannot calculate with buckets when there are already group values.\n          MSG\n\n          keys = buckets_keys\n          self.group_values = [FN.group_by(build_buckets_node, :bucket)]\n          super.transform_keys { 
|key| keys[key - 1] }\n        end\n\n        module Initializer\n          # Hook into the output of records to make sure we group by the buckets\n          def records\n            return super if buckets_value.blank?\n\n            keys = buckets_keys\n            col = buckets_column\n            super.group_by do |record|\n              val = (record[col] || 0) - 1\n              keys[val] if val >= 0 && val < keys.size\n            end\n          end\n        end\n\n        private\n\n          # Hook arel build to add the column\n          def build_arel(*)\n            return super if buckets_value.blank? || select_values.present?\n\n            self.select_extra_values += [build_buckets_node.as(buckets_column)]\n            super\n          end\n\n          # Build the Arel node for the buckets function\n          def build_buckets_node\n            attribute, values, count, cast, * = buckets_value\n\n            if values.is_a?(Range)\n              FN.width_bucket(\n                attribute,\n                FN.bind_type(values.begin, name: 'bucket_start', cast: 'numeric'),\n                FN.bind_type(values.end, name: 'bucket_end', cast: 'numeric'),\n                FN.bind_type(count, name: 'bucket_count', cast: 'integer'),\n              )\n            else\n              FN.width_bucket(attribute, ::Arel.array(values, cast: cast))\n            end\n          end\n\n          # Returns the column used for buckets, if any\n          def buckets_column\n            buckets_value.last&.to_s || 'bucket'\n          end\n\n          # Transform a range into the proper keys for buckets\n          def buckets_keys\n            keys = buckets_value.second\n            return keys unless keys.is_a?(Range)\n\n            left = nil\n            step = buckets_value.third\n            step = (keys.end - keys.begin).fdiv(step)\n            step = step.to_i if step.to_i == step\n            keys.step(step).each_with_object([]) do |right, result|\n              
next left = right if left.nil?\n\n              start, left = left, right\n              result << Range.new(start, left, true)\n            end\n          end\n\n      end\n\n      Initializer.include(Buckets::Initializer)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/relation/distinct_on.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Relation\n      module DistinctOn\n\n        # :nodoc:\n        def distinct_on_values\n          @values.fetch(:distinct_on, FROZEN_EMPTY_ARRAY)\n        end\n        # :nodoc:\n        def distinct_on_values=(value)\n          assert_modifiable!\n          @values[:distinct_on] = value\n        end\n\n        # Specifies whether the records should be unique or not by a given set\n        # of fields. For example:\n        #\n        #   User.distinct_on(:name)\n        #   # Returns 1 record per distinct name\n        #\n        #   User.distinct_on(:name, :email)\n        #   # Returns 1 record per distinct name and email\n        #\n        #   User.distinct_on(false)\n        #   # You can also remove the uniqueness\n        def distinct_on(*value)\n          spawn.distinct_on!(*value)\n        end\n\n        # Like #distinct_on, but modifies relation in place.\n        def distinct_on!(*value)\n          self.distinct_on_values = value\n          self\n        end\n\n        private\n\n          # Hook arel build to add the distinct on clause\n          def build_arel(*)\n            arel = super\n            value = self.distinct_on_values\n            arel.distinct_on(resolve_column(value)) if value.present?\n            arel\n          end\n\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/relation/inheritance.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Relation\n      module Inheritance\n\n        # :nodoc:\n        def cast_records_values\n          @values.fetch(:cast_records, FROZEN_EMPTY_ARRAY)\n        end\n        # :nodoc:\n        def cast_records_values=(value)\n          assert_modifiable!\n          @values[:cast_records] = value\n        end\n\n        # :nodoc:\n        def itself_only_value\n          @values.fetch(:itself_only, nil)\n        end\n        # :nodoc:\n        def itself_only_value=(value)\n          assert_modifiable!\n          @values[:itself_only] = value\n        end\n\n        delegate :quote_table_name, :quote_column_name, to: :connection\n\n        # Specify that the results should come only from the table that the\n        # entries were created on. For example:\n        #\n        #   Activity.itself_only\n        #   # Does not return entries for inherited tables\n        def itself_only\n          spawn.itself_only!\n        end\n\n        # Like #itself_only, but modifies relation in place.\n        def itself_only!(*)\n          self.itself_only_value = true\n          self\n        end\n\n        # Enables the casting of all returned records. The result will include\n        # all the information needed to instantiate the inherited models\n        #\n        #   Activity.cast_records\n        #   # The result list will have many different classes, for all\n        #   # inherited models of activities\n        def cast_records(*types, **options)\n          spawn.cast_records!(*types, **options)\n        end\n\n        # Like #cast_records, but modifies relation in place\n        def cast_records!(*types, **options)\n          where!(regclass.pg_cast(:varchar).in(types.map(&:table_name))) if options[:filter]\n          self.select_extra_values += [regclass.as(_record_class_attribute.to_s)]\n          self.cast_records_values = (types.present? ? 
types : model.casted_dependents.values)\n          self\n        end\n\n        private\n\n          # Hook arel build to add any necessary table\n          def build_arel(*)\n            arel = super\n            arel.only if self.itself_only_value === true\n            build_inheritances(arel)\n            arel\n          end\n\n          # Build all necessary data for inheritances\n          def build_inheritances(arel)\n            return if self.cast_records_values.empty?\n\n            mergeable = inheritance_mergeable_attributes\n\n            columns = build_inheritances_joins(arel, self.cast_records_values)\n            columns = columns.map do |column, arel_tables|\n              next arel_tables.first[column] if arel_tables.size == 1\n\n              if mergeable.include?(column)\n                FN.coalesce(*arel_tables.each_with_object(column).map(&:[])).as(column)\n              else\n                arel_tables.map { |table| table[column].as(\"#{table.left.name}__#{column}\") }\n              end\n            end\n\n            columns.push(build_auto_caster_marker(arel, self.cast_records_values))\n            self.select_extra_values += columns.flatten if columns.any?\n          end\n\n          # Build as many left outer join as necessary for each dependent table\n          def build_inheritances_joins(arel, types)\n            columns = Hash.new{ |h, k| h[k] = [] }\n            base_on_key = model.arel_table[primary_key]\n            base_attributes = model.attribute_names\n\n            # Iterate over each casted dependent calculating the columns\n            types.each.with_index do |model, idx|\n              join_table = model.arel_table.alias(\"\\\"i_#{idx}\\\"\")\n              arel.outer_join(join_table).on(base_on_key.eq(join_table[primary_key]))\n              (model.attribute_names - base_attributes).each do |column|\n                columns[column] << join_table\n              end\n            end\n\n            # Return the list of 
needed columns\n            columns.default_proc = nil\n            columns\n          end\n\n          def build_auto_caster_marker(arel, types)\n            attribute = regclass.pg_cast(:varchar).in(types.map(&:table_name))\n            attribute.as(self.class._auto_cast_attribute.to_s)\n          end\n\n          def regclass\n            arel_table['tableoid'].pg_cast(:regclass)\n          end\n\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/relation/join_series.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Relation\n      module JoinSeries\n\n        # Create the proper arel join\n        class << self\n          def build(relation, range, with: nil, as: :series, step: nil, time_zone: nil, cast: nil, mode: :inner, &block)\n            validate_build!(range, step)\n\n            args = [bind_value(range.begin), bind_value(range.end)]\n            args << bind_value(step) if step\n            args << bind_value(time_zone) if time_zone\n\n            result = Arel::Nodes::Ref.new(as.to_s)\n            func = FN.generate_series(*args).as(as.to_s)\n            condition = build_join_on(result, relation, with, cast, &block)\n            arel_join(mode).new(func, func.create_on(condition))\n          end\n\n          private\n\n            # Make sure we have a viable range\n            def validate_build!(range, step)\n              raise ArgumentError, <<~MSG.squish unless range.is_a?(Range)\n                Value must be a Range.\n              MSG\n\n              raise ArgumentError, <<~MSG.squish if range.begin.nil?\n                Beginless Ranges are not supported.\n              MSG\n\n              raise ArgumentError, <<~MSG.squish if range.end.nil?\n                Endless Ranges are not supported.\n              MSG\n\n              raise ArgumentError, <<~MSG.squish if !range.begin.is_a?(Numeric) && step.nil?\n                missing keyword: :step\n              MSG\n            end\n\n            # Creates the proper bind value\n            def bind_value(value)\n              case value\n              when Integer\n                FN.bind_type(value, :integer, name: 'series', cast: 'integer')\n              when Float\n                FN.bind_type(value, :float, name: 'series', cast: 'numeric')\n              when String\n                FN.bind_type(value, :string, name: 'series', cast: 'text')\n              when ActiveSupport::TimeWithZone\n                
FN.bind_type(value, :time, name: 'series', cast: 'timestamptz')\n              when Time\n                FN.bind_type(value, :time, name: 'series', cast: 'timestamp')\n              when DateTime\n                FN.bind_type(value, :datetime, name: 'series', cast: 'timestamp')\n              when ActiveSupport::Duration\n                FN.bind_type(value.iso8601, :string, name: 'series', cast: 'interval')\n              when Date then bind_value(value.to_time(:utc))\n              when ::Arel::Attributes::Attribute then value\n              else\n                raise ArgumentError, \"Unsupported value type: #{value.class}\"\n              end\n            end\n\n            # Get the class of the join on arel\n            def arel_join(mode)\n              case mode.to_sym\n              when :inner then ::Arel::Nodes::InnerJoin\n              when :left  then ::Arel::Nodes::OuterJoin\n              when :right then ::Arel::Nodes::RightOuterJoin\n              when :full  then ::Arel::Nodes::FullOuterJoin\n              else\n                raise ArgumentError, <<-MSG.squish\n                  The '#{mode}' is not implemented as a join type.\n                MSG\n              end\n            end\n\n            # Build the join on clause\n            def build_join_on(result, relation, with, cast)\n              raise ArgumentError, <<~MSG.squish if with.nil? && !block_given?\n                missing keyword: :with\n              MSG\n\n              return yield(result, relation.arel_table) if block_given?\n\n              with = relation.arel_table[with.to_s] if with.is_a?(Symbol)\n              with = with.pg_cast(cast) if cast && with.respond_to?(:pg_cast)\n\n              (cast ? result.pg_cast(cast) : result).eq(with)\n            end\n        end\n\n        # Creates a new join based on PG +generate_series()+ function. 
It is\n        # based on ranges, supports numbers and dates (as per PG documentation),\n        # custom stepping, time zones, and more. This simply coordinates the\n        # initialization of the proper join\n        def join_series(range, **xargs, &block)\n          spawn.join_series!(range, **xargs, &block)\n        end\n\n        # Like #join_series, but modifies relation in place.\n        def join_series!(range, **xargs, &block)\n          self.joins_values |= [JoinSeries.build(self, range, **xargs, &block)]\n          self\n        end\n\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/relation/merger.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module Relation\n      module Merger\n\n        def merge # :nodoc:\n          super\n\n          merge_select_extra\n          merge_distinct_on\n          merge_auxiliary_statements\n          merge_inheritance\n          merge_buckets\n\n          relation\n        end\n\n        private\n\n          # Merge extra select columns\n          def merge_select_extra\n            relation.select_extra_values.concat(other.select_extra_values).uniq! \\\n              if other.select_extra_values.present?\n          end\n\n          # Merge distinct on columns\n          def merge_distinct_on\n            return unless relation.is_a?(Relation::DistinctOn)\n            return if other.distinct_on_values.blank?\n\n            relation.distinct_on_values += other.distinct_on_values\n          end\n\n          # Merge auxiliary statements activated by +with+\n          def merge_auxiliary_statements\n            return unless defined?(Relation::AuxiliaryStatement) && relation.is_a?(Relation::AuxiliaryStatement)\n            return if other.auxiliary_statements_values.blank?\n\n            current = relation.auxiliary_statements_values.map{ |cte| cte.class }\n            other.auxiliary_statements_values.each do |other|\n              next if current.include?(other.class)\n              relation.auxiliary_statements_values += [other]\n              current << other.class\n            end\n          end\n\n          # Merge settings related to inheritance tables\n          def merge_inheritance\n            return unless relation.is_a?(Relation::Inheritance)\n\n            relation.itself_only_value = true if other.itself_only_value.present?\n\n            if other.cast_records_values.present?\n              relation.cast_records_values += other.cast_records_values\n              relation.cast_records_values.uniq!\n            end\n          end\n\n          # Merge settings related to 
buckets\n          def merge_buckets\n            return unless defined?(Relation::Buckets) && relation.is_a?(Relation::Buckets)\n            return if other.buckets_value.blank?\n\n            relation.buckets_value = other.buckets_value\n          end\n\n      end\n\n      ActiveRecord::Relation::Merger.prepend Merger\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/relation.rb",
    "content": "# frozen_string_literal: true\n\nrequire_relative 'relation/distinct_on'\nrequire_relative 'relation/inheritance'\n\nrequire_relative 'relation/merger'\n\nmodule Torque\n  module PostgreSQL\n    module Relation\n      extend ActiveSupport::Concern\n\n      include DistinctOn\n      include Inheritance\n\n      SINGLE_VALUE_METHODS = %i[itself_only buckets]\n      MULTI_VALUE_METHODS = %i[\n        select_extra distinct_on auxiliary_statements cast_records\n      ]\n\n      VALUE_METHODS = SINGLE_VALUE_METHODS + MULTI_VALUE_METHODS\n      FROZEN_EMPTY_ARRAY = ::ActiveRecord::QueryMethods::FROZEN_EMPTY_ARRAY\n\n      ARColumn = ::ActiveRecord::ConnectionAdapters::PostgreSQL::Column\n\n      # :nodoc:\n      def select_extra_values\n        @values.fetch(:select_extra, FROZEN_EMPTY_ARRAY)\n      end\n      # :nodoc:\n      def select_extra_values=(value)\n        assert_modifiable!\n        @values[:select_extra] = value\n      end\n\n      # Resolve column name when calculating models, allowing the column name to\n      # be more complex while keeping the query selection quality\n      def calculate(operation, column_name)\n        column_name = resolve_column(column_name).first if column_name.is_a?(Hash)\n        super(operation, column_name)\n      end\n\n      # Resolve column definition up to second value.\n      # For example, based on Post model:\n      #\n      #   resolve_column(['name', :title])\n      #   # Returns ['name', '\"posts\".\"title\"']\n      #\n      #   resolve_column([:title, {authors: :name}])\n      #   # Returns ['\"posts\".\"title\"', '\"authors\".\"name\"']\n      #\n      #   resolve_column([{authors: [:name, :age]}])\n      #   # Returns ['\"authors\".\"name\"', '\"authors\".\"age\"']\n      def resolve_column(list, base = false)\n        base = resolve_base_table(base)\n\n        Array.wrap(list).map do |item|\n          case item\n          when String\n            ::Arel.sql(klass.send(:sanitize_sql, item.to_s))\n     
     when Symbol\n            base ? base.arel_table[item] : klass.arel_table[item]\n          when Array\n            resolve_column(item, base)\n          when Hash\n            raise ArgumentError, 'Unsupported Hash for attributes on third level' if base\n            item.map { |key, other_list| resolve_column(other_list, key) }\n          else\n            raise ArgumentError, \"Unsupported argument type: #{item} (#{item.class})\"\n          end\n        end.flatten\n      end\n\n      # Get the TableMetadata from a relation\n      def resolve_base_table(relation)\n        return unless relation\n\n        table = predicate_builder.send(:table)\n        if table.associated_with?(relation.to_s)\n          table.associated_table(relation.to_s).send(:klass)\n        else\n          raise ArgumentError, \"Relation for #{relation} not found on #{klass}\"\n        end\n      end\n\n      # Serialize the given value so it can be used in a condition that involves\n      # the given column\n      def cast_for_condition(column, value)\n        column = columns_hash[column.to_s] unless column.is_a?(ARColumn)\n        caster = connection.lookup_cast_type_from_column(column)\n        connection.type_cast(caster.serialize(value))\n      end\n\n      private\n\n        def build_arel(*)\n          arel = super\n          arel.project(*select_extra_values) if select_values.blank?\n          arel\n        end\n\n      class_methods do\n        # Easy and storable way to access the name used to get the record table\n        # name when using inheritance tables\n        def _record_class_attribute\n          @@record_class ||= Torque::PostgreSQL.config\n            .inheritance.record_class_column_name.to_sym\n        end\n\n        # Easy and storable way to access the name used to get the indicator of\n        # auto casting inherited records\n        def _auto_cast_attribute\n          @@auto_cast ||= Torque::PostgreSQL.config\n            
.inheritance.auto_cast_column_name.to_sym\n        end\n      end\n\n      # When a relation is created, force the attributes to be defined,\n      # because the type mapper may add new methods to the model. This happens\n      # for the given model Klass and its inheritances\n      module Initializer\n        def initialize(klass, *, **)\n          super\n\n          klass.superclass.send(:relation) if klass.define_attribute_methods &&\n            klass.superclass != ActiveRecord::Base && !klass.superclass.abstract_class?\n        end\n\n        # Allow extra keyword arguments to be sent to +InsertAll+\n        def upsert_all(attributes, **xargs)\n          xargs = xargs.reverse_merge(on_duplicate: :update)\n          ::ActiveRecord::InsertAll.execute(self, attributes, **xargs)\n        end\n      end\n    end\n\n    # Include the methods here provided and then change the constants to ensure\n    # the operation of ActiveRecord Relation\n    ActiveRecord::Relation.include Relation\n    ActiveRecord::Relation.prepend Relation::Initializer\n\n    ActiveRecord::Relation::SINGLE_VALUE_METHODS.concat(Relation::SINGLE_VALUE_METHODS)\n    ActiveRecord::Relation::MULTI_VALUE_METHODS.concat(Relation::MULTI_VALUE_METHODS)\n    ActiveRecord::Relation::VALUE_METHODS.concat(Relation::VALUE_METHODS)\n    ActiveRecord::QueryMethods::VALID_UNSCOPING_VALUES.merge(%i[cast_records itself_only\n      distinct_on auxiliary_statements buckets])\n\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/schema_cache/bound_schema_reflection.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module BoundSchemaReflection\n      def add_model_name(table_name, model)\n        source = defined?(@pool) ? @pool : @connection\n        @schema_reflection.add_model_name(source, table_name, model)\n      end\n\n      def dependencies(table_name)\n        source = defined?(@pool) ? @pool : @connection\n        @schema_reflection.dependencies(source, table_name)\n      end\n\n      def associations(table_name)\n        source = defined?(@pool) ? @pool : @connection\n        @schema_reflection.associations(source, table_name)\n      end\n\n      def lookup_model(table_name, scoped_class = '')\n        source = defined?(@pool) ? @pool : @connection\n        @schema_reflection.lookup_model(source, table_name, scoped_class)\n      end\n    end\n\n    ActiveRecord::ConnectionAdapters::BoundSchemaReflection.prepend BoundSchemaReflection\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/schema_cache/inheritance.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module SchemaCache\n      module Inheritance\n\n        # Try to find a model based on a given table\n        def lookup_model(table_name, scoped_class = '', source_to_model:)\n          scoped_class = scoped_class.name if scoped_class.is_a?(Class)\n          return source_to_model[table_name] if source_to_model.key?(table_name)\n\n          # Get all the possible scopes\n          scopes = scoped_class.scan(/(?:::)?[A-Z][a-z]+/)\n          scopes.unshift('Object::')\n\n          # Check if the table name comes with a schema\n          if table_name.include?('.')\n            schema, table_name = table_name.split('.')\n            scopes.insert(1, schema.camelize) if schema != 'public'\n          end\n\n          # Consider the maximum namespaced possible model name\n          max_name = table_name.tr('_', '/').camelize.split(/(::)/)\n          max_name[-1] = max_name[-1].singularize\n\n          # Test all the possible names against all the possible scopes\n          until scopes.size == 0\n            scope = scopes.join.chomp('::').safe_constantize\n            model = find_model(max_name, table_name, scope) unless scope.nil?\n            return source_to_model[table_name] = model unless model.nil?\n            scopes.pop\n          end\n\n          # If this part is reach, no model name was found\n          raise LookupError.new(<<~MSG.squish)\n            Unable to find a valid model that is associated with the\n            '#{table_name}' table. Please, check if they correctly inherit from\n            ActiveRecord::Base\n          MSG\n        end\n\n        protected\n\n          # Find a model by a given max namespaced class name that matches the\n          # given table name\n          def find_model(max_name, table_name, scope = Object)\n            pieces = max_name.is_a?(::Array) ? 
max_name : max_name.split(/(::)/)\n            ns_places = (1..(max_name.size - 1)).step(2).to_a\n\n            # Generate all possible combinations\n            conditions = []\n            range = Torque::PostgreSQL.config.inheritance.inverse_lookup \\\n              ? 0.upto(ns_places.size) \\\n              : ns_places.size.downto(0)\n            range.each do |size|\n              conditions.concat(ns_places.combination(size).to_a)\n            end\n\n            # Now iterate over\n            while (condition = conditions.shift)\n              ns_places.each do |i|\n                pieces[i] = condition.include?(i) ? '::' : ''\n              end\n\n              candidate = pieces.join\n              candidate.prepend(\"#{scope.name}::\") unless scope === Object\n\n              klass = candidate.safe_constantize\n              next if klass.nil?\n\n              # Check if the class match the table name\n              return klass if klass < ::ActiveRecord::Base &&\n                klass.table_name == table_name\n            end\n          end\n\n          # Calculates the inverted dependency (association), where even indirect\n          # inheritance comes up in the list\n          def generate_associations(inheritance_dependencies)\n            return {} if inheritance_dependencies.empty?\n\n            result = Hash.new{ |h, k| h[k] = [] }\n            masters = inheritance_dependencies.values.flatten.uniq\n\n            # Add direct associations\n            masters.map do |master|\n              inheritance_dependencies.each do |(dependent, associations)|\n                result[master] << dependent if associations.include?(master)\n              end\n            end\n\n            # Add indirect associations\n            result.each do |master, children|\n              children.each do |child|\n                children.concat(result[child]).uniq! 
if result.key?(child)\n              end\n            end\n\n            # Remove the default proc that would create new entries\n            result.default_proc = nil\n            result\n          end\n\n          # Parse the Torque config into the proper hash of irregular models.\n          # This is smart enough to only load necessary models\n          def prepare_irregular_models(data_sources)\n            entries = Torque::PostgreSQL.config.irregular_models\n            entries.slice(*data_sources).each_with_object({}) do |(table, model), hash|\n              hash[table] = model.is_a?(Class) ? model : model.constantize\n            end\n          end\n\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/schema_cache/schema_reflection.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module SchemaReflection\n      def add_model_name(source, table_name, model)\n        cache(source).add_model_name(source, table_name, model)\n      end\n\n      def dependencies(source, table_name)\n        cache(source).dependencies(source, table_name)\n      end\n\n      def associations(source, table_name)\n        cache(source).associations(source, table_name)\n      end\n\n      def lookup_model(source, table_name, scoped_class)\n        cache(source).lookup_model(table_name, scoped_class)\n      end\n    end\n\n    ActiveRecord::ConnectionAdapters::SchemaReflection.prepend SchemaReflection\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/schema_cache.rb",
    "content": "# frozen_string_literal: true\n\nrequire 'torque/postgresql/schema_cache/inheritance'\nrequire 'torque/postgresql/schema_cache/schema_reflection'\nrequire 'torque/postgresql/schema_cache/bound_schema_reflection'\n\nmodule Torque\n  module PostgreSQL\n    LookupError = Class.new(ArgumentError)\n\n    # :TODO: Create the +add+ to load inheritance info\n    module SchemaCache\n      include Torque::PostgreSQL::SchemaCache::Inheritance\n\n      def initialize(*) # :nodoc:\n        super\n\n        @data_sources_model_names = {}\n        @inheritance_dependencies = {}\n        @inheritance_associations = {}\n        @inheritance_loaded = false\n      end\n\n      def initialize_dup(*) # :nodoc:\n        super\n        @data_sources_model_names = @data_sources_model_names.dup\n        @inheritance_dependencies = @inheritance_dependencies.dup\n        @inheritance_associations = @inheritance_associations.dup\n      end\n\n      def encode_with(coder) # :nodoc:\n        super\n        coder['data_sources_model_names'] = @data_sources_model_names\n        coder['inheritance_dependencies'] = @inheritance_dependencies\n        coder['inheritance_associations'] = @inheritance_associations\n      end\n\n      def init_with(coder) # :nodoc:\n        super\n        @data_sources_model_names = coder['data_sources_model_names']\n        @inheritance_dependencies = coder['inheritance_dependencies']\n        @inheritance_associations = coder['inheritance_associations']\n      end\n\n      def add(connection_or_table_name, table_name = connection_or_table_name, *) # :nodoc:\n        super\n\n        # Reset inheritance information when a table is added\n        if @data_sources.key?(table_name)\n          @inheritance_dependencies.clear\n          @inheritance_associations.clear\n          @inheritance_loaded = false\n        end\n      end\n\n      def clear! 
# :nodoc:\n        super\n        @data_sources_model_names.clear\n        @inheritance_dependencies.clear\n        @inheritance_associations.clear\n        @inheritance_loaded = false\n      end\n\n      def size # :nodoc:\n        super + [\n          @data_sources_model_names,\n          @inheritance_dependencies,\n          @inheritance_associations,\n        ].map(&:size).inject(:+)\n      end\n\n      def clear_data_source_cache!(connection_or_name, name = connection_or_name) # :nodoc:\n        super\n        @data_sources_model_names.delete name\n        @inheritance_dependencies.delete name\n        @inheritance_associations.delete name\n      end\n\n      def marshal_dump # :nodoc:\n        super + [\n          @inheritance_dependencies,\n          @inheritance_associations,\n          @data_sources_model_names,\n          @inheritance_loaded,\n        ]\n      end\n\n      def marshal_load(array) # :nodoc:\n        @inheritance_loaded = array.pop\n        @data_sources_model_names = array.pop\n        @inheritance_associations = array.pop\n        @inheritance_dependencies = array.pop\n        super\n      end\n\n      # A way to manually add models name so it doesn't need the lookup method\n      def add_model_name(*args)\n        model, *source = args.reverse\n        return unless data_source_exists?(*source.reverse) && model.is_a?(Class)\n\n        @data_sources_model_names[source.first] = model\n      end\n\n      # Get all the tables that the given one inherits from\n      def dependencies(source, table_name = source)\n        reload_inheritance_data!(source == table_name ? connection : source)\n        @inheritance_dependencies[table_name]\n      end\n\n      # Get the list of all tables that are associated (direct or indirect\n      # inheritance) with the provided one\n      def associations(source, table_name = source)\n        reload_inheritance_data!(source == table_name ? 
connection : source)\n        @inheritance_associations[table_name]\n      end\n\n      # Override the inheritance implementation to pass over the proper cache of\n      # the existing association between data sources and model names\n      def lookup_model(*args, **xargs)\n        super(*args, **xargs, source_to_model: @data_sources_model_names)\n      end\n\n      private\n\n        # Reload information about tables inheritance and dependencies, uses a\n        # cache to not perform additional checks\n        def reload_inheritance_data!(source)\n          return if @inheritance_loaded\n\n          source.with_connection do |connection|\n            @inheritance_dependencies = connection.inherited_tables\n            @inheritance_associations = generate_associations\n            @inheritance_loaded = true\n          end\n        end\n\n        # Calculates the inverted dependency (association), where even indirect\n        # inheritance comes up in the list\n        def generate_associations\n          super(@inheritance_dependencies)\n        end\n\n        # Use this method to also load any irregular model name\n        def add_all(source = nil)\n          super\n\n          data_sources = source.present? ? tables_to_cache(source) : @data_sources.keys\n          @data_sources_model_names = prepare_irregular_models(data_sources)\n        end\n    end\n\n    ActiveRecord::ConnectionAdapters::SchemaCache.prepend SchemaCache\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/table_name.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    class TableName < Delegator\n      def initialize(klass, table_name)\n        @klass = klass\n        @table_name = table_name\n      end\n\n      def schema\n        return @schema if defined?(@schema)\n\n        @schema = ([@klass] + @klass.module_parents[0..-2]).find do |klass|\n          next unless klass.respond_to?(:schema) && !(value = klass.schema).nil?\n          break value\n        end\n      end\n\n      def to_s\n        schema.nil? ? @table_name : \"#{schema}.#{@table_name}\"\n      end\n\n      alias __getobj__ to_s\n\n      def ==(other)\n        other.to_s =~ /(\"?(?:#{schema || search_path_schemes.join('|')})\"?\\.)?\"?#{@table_name}\"?/\n      end\n\n      def __setobj__(value)\n        @table_name = value\n      end\n\n      private\n\n        def search_path_schemes\n          @klass.connection.schemas_search_path_sanitized\n        end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/version.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    VERSION = '4.0.1'\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/versioned_commands/command_migration.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module VersionedCommands\n      module Migration\n        def initialize(*args)\n          @command = args.pop\n          super(*args)\n        end\n\n        # Prepare the description based on the direction\n        def migrate(direction)\n          @description = description_for(direction)\n          super\n        end\n\n        # Uses the command to execute the proper action\n        def exec_migration(conn, direction)\n          @connection = conn\n          direction == :up ? @command.up : @command.down\n        ensure\n          @connection = nil\n          @execution_strategy = nil\n        end\n\n        # Better formatting of the output\n        def announce(message)\n          action, result = @description\n\n          title = [\n            @command.type.capitalize,\n            @command.object_name,\n            \"v#{@command.op_version}\"\n          ].join(' ')\n\n          timing = message.split(' ', 2).second\n          action = \"#{result} #{timing}\" if timing.present?\n          text = \"#{@command.version} #{title}: #{action}\"\n          length = [0, 75 - text.length].max\n\n          write \"== %s %s\" % [text, \"=\" * length]\n        end\n\n        # Produces a nice description of what is being done\n        def description_for(direction)\n          base = @command.op.chomp('e') if direction == :up\n          base ||=\n            case @command.op\n            when 'create' then 'dropp'\n            when 'update' then 'revert'\n            when 'remove' then 're-creat'\n            end\n\n          [\"#{base}ing\", \"#{base}ed\"]\n        end\n\n        # Print the command and then execute it\n        def execute(command)\n          write \"-- #{command.gsub(/(?<!\\A)^/, '   ').gsub(/[\\s\\n]*\\z/, '')}\"\n          execution_strategy.execute(command)\n        end\n      end\n\n      CommandMigration = Struct.new(*%i[filename version op type object_name 
op_version scope]) do\n        delegate :execute, to: '@migration'\n\n        def initialize(filename, *args)\n          super(File.expand_path(filename), *args)\n          @migration = nil\n        end\n\n        # Rails uses this to avoid duplicate migrations\n        def name\n          \"#{op}_#{type}_#{object_name}_v#{op_version}\"\n        end\n\n        # There is no way to setup this, so it is always false\n        def disable_ddl_transaction\n          false\n        end\n\n        # Down is more complicated, then this just starts separating the logic\n        def migrate(direction)\n          @migration = ActiveRecord::Migration.allocate\n          @migration.extend(Migration)\n          @migration.send(:initialize, name, version, self)\n          @migration.migrate(direction)\n        ensure\n          @migration = nil\n        end\n\n        # Simply executes the underlying command\n        def up\n          content = File.read(filename)\n          VersionedCommands.validate!(type, content, object_name)\n          execute content\n        end\n\n        # Find the previous command and executes it\n        def down\n          return drop if op_version == 1\n          dirs = @migration.pool.migrations_paths\n          version = op_version - (op == 'remove' ? 
0 : 1)\n          execute VersionedCommands.fetch_command(dirs, type, object_name, version)\n        end\n\n        # Drops the type created\n        def drop\n          method_name = :\"drop_#{type}\"\n          return send(method_name) if VersionedCommands.valid_type?(type)\n          raise ArgumentError, \"Unknown versioned command type: #{type}\"\n        end\n\n        private\n\n          # Drop all functions all at once\n          def drop_function\n            definitions = File.read(filename).scan(Regexp.new([\n              \"FUNCTION\\\\s+#{NAME_MATCH}\",\n              '\\s*(\\([_a-z0-9 ,]*\\))?',\n            ].join, 'mi'))\n\n            functions = definitions.map(&:join).join(', ')\n            execute \"DROP FUNCTION #{functions};\"\n          end\n\n          # Drop the type\n          def drop_type\n            name = File.read(filename).scan(Regexp.new(\"TYPE\\\\s+#{NAME_MATCH}\", 'mi'))\n            execute \"DROP TYPE #{name.first.first};\"\n          end\n\n          # Drop view or materialized view\n          def drop_view\n            mat, name = File.read(filename).scan(Regexp.new([\n              '(MATERIALIZED)?\\s+(?:RECURSIVE\\s+)?',\n              \"VIEW\\\\s+#{NAME_MATCH}\",\n            ].join, 'mi')).first\n\n            execute \"DROP#{' MATERIALIZED' if mat.present?} VIEW #{name};\"\n          end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/versioned_commands/generator.rb",
    "content": "# frozen_string_literal: true\n\nrequire 'rails/generators/base'\nrequire 'rails/generators/active_record/migration'\n\nmodule Torque\n  module PostgreSQL\n    module VersionedCommands\n      module Generator\n        TEMPLATES_PATH = '../../../generators/torque/templates'\n\n        attr_reader :file_name\n\n        def self.included(base)\n          type = base.name.demodulize.chomp('Generator').underscore\n\n          base.send(:source_root, File.expand_path(TEMPLATES_PATH, __dir__))\n          base.include(ActiveRecord::Generators::Migration)\n\n          base.instance_variable_set(:@type, type)\n          base.instance_variable_set(:@desc, <<~DESC.squish)\n            Generates a migration for creating, updating, or removing a #{type}.\n          DESC\n\n          base.class_option :operation, type: :string, aliases: %i(--op),\n            desc: 'The name for the operation'\n\n          base.argument :name, type: :string,\n            desc: \"The name of the #{type}\"\n        end\n\n        def type\n          self.class.instance_variable_get(:@type)\n        end\n\n        def create_migration_file\n          version = count_object_entries\n          operation = options[:operation] || (version == 0 ? 'create' : 'update')\n          @file_name = \"#{operation}_#{type}_#{name.underscore}_v#{version + 1}\"\n\n          validate_file_name!\n          migration_template \"#{type}.sql.erb\", File.join(db_migrate_path, \"#{file_name}.sql\")\n        end\n\n        def count_object_entries\n          Dir.glob(\"#{db_migrate_path}/*_#{type}_#{name.underscore}_v*.sql\").size\n        end\n\n        def validate_file_name!\n          unless /^[_a-z0-9]+$/.match?(file_name)\n            raise ActiveRecord::IllegalMigrationNameError.new(file_name)\n          end\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/versioned_commands/migration_context.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    class IllegalCommandTypeError < ActiveRecord::MigrationError\n      def initialize(file)\n        super(<<~MSG.squish)\n          Illegal name for command file '#{file}'. Commands are more strict and require\n          the version, one of create, update, or remove, type of object, name\n          and operation version to be present in the filename.\n          (e.g. 20250101010101_create_function_my_function_v1.sql)\n        MSG\n      end\n    end\n\n    module VersionedCommands\n      module MigrationContext\n        InvalidMigrationTimestampError = ActiveRecord::InvalidMigrationTimestampError\n        PGAdapter = ActiveRecord::ConnectionAdapters::PostgreSQLAdapter\n\n        def migrations\n          return super unless running_for_pg?\n\n          commands = command_files.map do |file|\n            version, op, type, name, op_version, scope = parse_command_filename(file)\n            raise IllegalCommandTypeError.new(file) unless version\n            if validate_timestamp? && !valid_migration_timestamp?(version)\n              raise InvalidMigrationTimestampError.new(version, [op, type, name, op_version].join('_'))\n            end\n\n            version = version.to_i\n            CommandMigration.new(file, version, op, type, name, op_version.to_i, scope)\n          end\n\n          super.concat(commands).sort_by(&:version)\n        end\n\n        def migrations_status\n          return super unless running_for_pg?\n          db_list = schema_migration.normalized_versions\n\n          commands = command_files.map do |file|\n            version, op, type, name, op_version, scope = parse_command_filename(file)\n            raise IllegalCommandTypeError.new(file) unless version\n            if validate_timestamp? 
&& !valid_migration_timestamp?(version)\n              raise InvalidMigrationTimestampError.new(version, [op, type, name, op_version].join('_'))\n            end\n\n            version = schema_migration.normalize_migration_number(version)\n            status = db_list.delete(version) ? \"up\" : \"down\"\n            [status, version, \"#{op.capitalize} #{type.capitalize} #{name}#{scope} (v#{op_version})\"]\n          end\n\n          (commands + super).uniq(&:second).sort_by(&:second)\n        end\n\n        def migration_commands\n          migrations.select { |m| m.is_a?(VersionedCommands::CommandMigration) }\n        end\n\n        private\n\n          # Checks if the current migration context is running for PostgreSQL\n          def running_for_pg?\n            connection_pool.db_config.adapter_class <= PGAdapter\n          end\n\n          # Get the list of all versioned command files\n          def command_files\n            paths = Array(migrations_paths)\n            Dir[*paths.flat_map { |path| \"#{path}/**/[0-9]*_*.sql\" }]\n          end\n\n          # Commands are more strict with the filename format\n          def parse_command_filename(filename)\n            File.basename(filename).scan(VersionedCommands.filename_regexp).first\n          end\n      end\n\n      ActiveRecord::MigrationContext.prepend(MigrationContext)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/versioned_commands/migrator.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module VersionedCommands\n      module Migrator\n        def execute_migration_in_transaction(migration)\n          @versioned_command = versioned_command?(migration) && migration\n          super\n        ensure\n          @versioned_command = false\n        end\n\n        def record_version_state_after_migrating(version)\n          return super if (command = @versioned_command) == false\n\n          @versioned_table ||= VersionedCommands::SchemaTable.new(connection.pool)\n          @versioned_counter ||= @versioned_table.count\n\n          if down?\n            @versioned_counter -= 1\n            @versioned_table.delete_version(command)\n            @versioned_table.drop_table if @versioned_counter.zero?\n          else\n            @versioned_table.create_table if @versioned_counter.zero?\n            @versioned_table.create_version(command)\n            @versioned_counter += 1\n          end\n        end\n\n        def versioned_command?(migration)\n          migration.is_a?(VersionedCommands::CommandMigration)\n        end\n      end\n\n      ActiveRecord::Migrator.prepend(Migrator)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/versioned_commands/schema_table.rb",
    "content": "# frozen_string_literal: true\n\nmodule Torque\n  module PostgreSQL\n    module VersionedCommands\n      class SchemaTable\n        attr_reader :arel_table\n\n        def initialize(pool)\n          @pool = pool\n          @arel_table = ::Arel::Table.new(table_name)\n        end\n\n        def create_version(command)\n          im = ::Arel::InsertManager.new(arel_table)\n          im.insert(\n            arel_table[primary_key] => command.version,\n            arel_table['type'] => command.type,\n            arel_table['object_name'] => command.object_name,\n          )\n\n          @pool.with_connection do |connection|\n            connection.insert(im, \"#{name} Create\", primary_key, command.version)\n          end\n        end\n\n        def delete_version(command)\n          dm = ::Arel::DeleteManager.new(arel_table)\n          dm.wheres = [arel_table[primary_key].eq(command.version.to_s)]\n\n          @pool.with_connection do |connection|\n            connection.delete(dm, \"#{name} Destroy\")\n          end\n        end\n\n        def primary_key\n          'version'\n        end\n\n        def name\n          'Torque::PostgreSQL::VersionedCommand'\n        end\n\n        def table_name\n          [\n            ActiveRecord::Base.table_name_prefix,\n            PostgreSQL.config.versioned_commands.table_name,\n            ActiveRecord::Base.table_name_suffix,\n          ].join\n        end\n\n        def create_table\n          @pool.with_connection do |connection|\n            return if connection.table_exists?(table_name)\n\n            parent = @pool.schema_migration.table_name\n            connection.create_table(table_name, inherits: parent) do |t|\n              t.string :type, null: false, index: true\n              t.string :object_name, null: false, index: true\n            end\n          end\n        end\n\n        def drop_table\n          @pool.with_connection do |connection|\n            connection.drop_table table_name, 
if_exists: true\n          end\n        end\n\n        def count\n          return 0 unless table_exists?\n\n          sm = ::Arel::SelectManager.new(arel_table)\n          sm.project(*FN.count(::Arel.star))\n\n          @pool.with_connection do |connection|\n            connection.select_value(sm, \"#{self.class} Count\")\n          end\n        end\n\n        def table_exists?\n          @pool.with_connection { |connection| connection.data_source_exists?(table_name) }\n        end\n\n        def versions_of(type)\n          return [] unless table_exists?\n\n          sm = ::Arel::SelectManager.new(arel_table)\n          sm.project(arel_table['object_name'], FN.count(::Arel.star).as('version'))\n          sm.where(arel_table['type'].eq(type.to_s))\n          sm.group(arel_table['object_name'])\n          sm.order(arel_table['object_name'].asc)\n\n          @pool.with_connection do |connection|\n            connection.select_rows(sm, \"#{name} Load\")\n          end\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql/versioned_commands.rb",
    "content": "# frozen_string_literal: true\n\nrequire_relative 'versioned_commands/command_migration'\nrequire_relative 'versioned_commands/migration_context'\nrequire_relative 'versioned_commands/migrator'\nrequire_relative 'versioned_commands/schema_table'\n\nmodule Torque\n  module PostgreSQL\n    # Takes advantage of Rails migrations to create other sorts of\n    # objects/commands that can also be versioned. Everything migrated will\n    # still live within Migrations borders (i.e., the schema_migrations), but\n    # the way they are handled and registered in the schema dumper is completely\n    # different\n    module VersionedCommands\n      RAILS_APP = defined?(Rails.application.paths)\n      NAME_MATCH = '\"?((?:[_a-z0-9]+\"?\\.\"?)?[_a-z0-9]+)\"?'\n\n      class << self\n        # Check if the type is current enabled\n        def valid_type?(type)\n          PostgreSQL.config.versioned_commands.types.include?(type.to_sym)\n        end\n\n        # Run the internal validations for the given type and content\n        def validate!(type, content, name)\n          method_name = :\"validate_#{type}!\"\n          return send(method_name, content, name) if valid_type?(type)\n          raise ArgumentError, \"Unknown versioned command type: #{type}\"\n        end\n\n        # Get the content of the command based on the type, name, and version\n        def fetch_command(dirs, type, name, version)\n          paths = Array.wrap(dirs).map { |d| \"#{d}/**/*_#{type}_#{name}_v#{version}.sql\" }\n          files = Dir[*paths]\n          return File.read(files.first) if files.one?\n\n          raise ArgumentError, <<~MSG.squish if files.none?\n            No previous version found for #{type} #{name}\n            of version v#{version}.\n          MSG\n\n          raise ArgumentError, <<~MSG.squish if files.many?\n            Multiple files found for #{type} #{name}\n            of version v#{version}.\n          MSG\n        end\n\n        # The regexp is dynamic due 
to the list of available types\n        def filename_regexp\n          @filename_regexp ||= begin\n            types = PostgreSQL.config.versioned_commands.types\n            Regexp.new([\n              \"\\\\A([0-9]+)_\",\n              \"(create|update|remove)_\",\n              \"(#{types.join('|')})_\",\n              \"([_a-z0-9]*)\",\n              \"_v([0-9]+)\",\n              \"\\\\.?([_a-z0-9]*)?\",\n              \"\\\\.sql\\\\z\",\n            ].join)\n          end\n        end\n\n        private\n\n          # Validate that the content of the command is correct\n          def validate_function!(content, name)\n            result = content.scan(Regexp.new([\n              '^\\s*CREATE\\s+(OR\\s+REPLACE)?\\s*',\n              \"FUNCTION\\\\s+#{NAME_MATCH}\",\n            ].join, 'mi'))\n\n            names = result.map(&:last).compact.uniq(&:downcase)\n            raise ArgumentError, <<~MSG.squish if names.size > 1\n              Multiple functions definition found.\n            MSG\n\n            raise ArgumentError, <<~MSG.squish unless result.all?(&:first)\n              'OR REPLACE' is required for proper migration support.\n            MSG\n\n            fn_name = names.first.downcase.sub('.', '_')\n            raise ArgumentError, <<~MSG.squish if fn_name != name.downcase\n              Function name must match file name.\n            MSG\n          end\n\n          # Validate that the content of the command is correct\n          def validate_type!(content, name)\n            creates = content.scan(Regexp.new(['^\\s*CREATE\\s+TYPE\\s+', NAME_MATCH].join, 'mi'))\n            drops = content.scan(Regexp.new([\n              '^\\s*DROP\\s+TYPE\\s+(IF\\s+EXISTS)?\\s*',\n              NAME_MATCH,\n            ].join, 'mi'))\n\n            raise ArgumentError, <<~MSG.squish if creates.size > 1\n              More than one type definition found.\n            MSG\n\n            raise ArgumentError, <<~MSG.squish if drops.size > 1\n              More than 
one type drop found.\n            MSG\n\n            raise ArgumentError, <<~MSG.squish if drops.empty?\n              'DROP TYPE' is required for proper migration support.\n            MSG\n\n            create_name = creates.first.last.downcase\n            raise ArgumentError, <<~MSG.squish if drops.first.last.downcase != create_name\n              Drop does not match create.\n            MSG\n\n            create_name = create_name.sub('.', '_')\n            raise ArgumentError, <<~MSG.squish if create_name != name.downcase\n              Type name must match file name.\n            MSG\n          end\n\n          # Validate that the content of the command is correct\n          def validate_view!(content, name)\n            result = content.scan(Regexp.new([\n              '^\\s*CREATE\\s+(OR\\s+REPLACE)?\\s*',\n              '((?:TEMP|TEMPORARY|MATERIALIZED)\\s+)?',\n              '(?:RECURSIVE\\s+)?',\n              \"VIEW\\\\s+#{NAME_MATCH}\",\n            ].join, 'mi'))\n\n            raise ArgumentError, <<~MSG.squish if result.empty?\n              Missing or invalid view definition.\n            MSG\n\n            raise ArgumentError, <<~MSG.squish if result.size > 1\n              More than one view definition found.\n            MSG\n\n            with_replace, opt, view_name = result.first\n            if opt&.strip == 'MATERIALIZED'\n              raise ArgumentError, <<~MSG.squish if with_replace.present?\n                Materialized view does not support 'OR REPLACE'.\n              MSG\n\n              with_drop = \"DROP MATERIALIZED VIEW IF EXISTS #{view_name};\"\n              raise ArgumentError, <<~MSG.squish unless content.include?(with_drop)\n                'DROP MATERIALIZED VIEW IF EXISTS' is required for proper migration support.\n              MSG\n            else\n              raise ArgumentError, <<~MSG.squish if with_replace.blank?\n                'OR REPLACE' is required for proper migration support.\n              MSG\n         
   end\n\n            view_name = view_name.downcase.sub('.', '_')\n            raise ArgumentError, <<~MSG.squish if view_name != name.downcase\n              View name must match file name.\n            MSG\n          end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/torque/postgresql.rb",
    "content": "require 'i18n'\nrequire 'ostruct'\nrequire 'active_model'\nrequire 'active_record'\nrequire 'active_support'\n\nrequire 'active_support/core_ext/date/acts_like'\nrequire 'active_support/core_ext/time/zones'\nrequire 'active_record/connection_adapters/postgresql_adapter'\n\nrequire 'torque/postgresql/config'\nrequire 'torque/postgresql/version'\nrequire 'torque/postgresql/collector'\nrequire 'torque/postgresql/geometry_builder'\nrequire 'torque/postgresql/predicate_builder'\n\nrequire 'torque/postgresql/i18n'\nrequire 'torque/postgresql/arel'\nrequire 'torque/postgresql/adapter'\nrequire 'torque/postgresql/associations'\nrequire 'torque/postgresql/attributes'\nrequire 'torque/postgresql/autosave_association'\nrequire 'torque/postgresql/inheritance'\nrequire 'torque/postgresql/base' # Needs to be after inheritance\nrequire 'torque/postgresql/insert_all'\nrequire 'torque/postgresql/migration'\nrequire 'torque/postgresql/relation'\nrequire 'torque/postgresql/reflection'\nrequire 'torque/postgresql/schema_cache'\nrequire 'torque/postgresql/table_name'\nrequire 'torque/postgresql/function'\n\nrequire 'torque/postgresql/railtie' if defined?(Rails)\n"
  },
  {
    "path": "lib/torque-postgresql.rb",
    "content": "require 'torque/postgresql'\n"
  },
  {
    "path": "spec/en.yml",
    "content": "en:\n  torque: 'Torque Rocks!'\n  activerecord:\n    attributes:\n      user:\n        role:\n          visitor: 'A simple Visitor'\n      role:\n        assistant: 'An Assistant'\n    enums:\n      content_status:\n        created: '1 - Created'\n      roles:\n        manager: 'The Manager'\n  enum:\n    content_status:\n      draft: 'Draft (2)'\n    published: 'Finally published'\n    admin: 'Super Duper Admin'\n"
  },
  {
    "path": "spec/factories/authors.rb",
    "content": "FactoryBot.define do\n  factory :author do\n    name      { Faker::Name.name }\n    specialty { Enum::Specialties.values.sample }\n  end\nend\n"
  },
  {
    "path": "spec/factories/comments.rb",
    "content": "FactoryBot.define do\n  factory :comment do\n    content { Faker::Lorem.paragraph }\n\n    factory :comment_recursive do\n      comment_id { Comment.order('RANDOM()').first.id }\n    end\n\n    trait :random_user do\n      user_id { User.order('RANDOM()').first.id }\n    end\n  end\nend\n"
  },
  {
    "path": "spec/factories/item.rb",
    "content": "FactoryBot.define do\n  factory :item do\n    name { Faker::Lorem.sentence }\n  end\nend\n"
  },
  {
    "path": "spec/factories/posts.rb",
    "content": "FactoryBot.define do\n  factory :post do\n    title   { Faker::Lorem.sentence }\n    content { Faker::Lorem.paragraph }\n  end\nend\n"
  },
  {
    "path": "spec/factories/tags.rb",
    "content": "FactoryBot.define do\n  factory :tag do\n    name { Faker::Lorem.sentence }\n  end\nend\n"
  },
  {
    "path": "spec/factories/texts.rb",
    "content": "FactoryBot.define do\n  factory :text do\n    content { Faker::Lorem.sentence }\n  end\nend\n"
  },
  {
    "path": "spec/factories/users.rb",
    "content": "FactoryBot.define do\n  factory :user do\n    name { Faker::Name.name }\n    role { 'visitor' }\n  end\nend\n"
  },
  {
    "path": "spec/factories/videos.rb",
    "content": "FactoryBot.define do\n  factory :video do\n    title { Faker::Lorem.sentence }\n  end\nend\n"
  },
  {
    "path": "spec/fixtures/migrations/20250101000001_create_users.rb",
    "content": ""
  },
  {
    "path": "spec/fixtures/migrations/20250101000002_create_function_count_users_v1.sql",
    "content": ""
  },
  {
    "path": "spec/fixtures/migrations/20250101000003_create_internal_users.rb",
    "content": ""
  },
  {
    "path": "spec/fixtures/migrations/20250101000004_update_function_count_users_v2.sql",
    "content": ""
  },
  {
    "path": "spec/fixtures/migrations/20250101000005_create_view_all_users_v1.sql",
    "content": ""
  },
  {
    "path": "spec/fixtures/migrations/20250101000006_create_type_user_id_v1.sql",
    "content": ""
  },
  {
    "path": "spec/fixtures/migrations/20250101000007_remove_function_count_users_v2.sql",
    "content": ""
  },
  {
    "path": "spec/initialize.rb",
    "content": "require_relative '../lib/torque/postgresql/auxiliary_statement'\n\nrequire_relative '../lib/torque/postgresql/adapter/schema_overrides'\n\nrequire_relative '../lib/torque/postgresql/adapter/oid/box'\nrequire_relative '../lib/torque/postgresql/adapter/oid/circle'\nrequire_relative '../lib/torque/postgresql/adapter/oid/enum'\nrequire_relative '../lib/torque/postgresql/adapter/oid/enum_set'\nrequire_relative '../lib/torque/postgresql/adapter/oid/interval'\nrequire_relative '../lib/torque/postgresql/adapter/oid/line'\nrequire_relative '../lib/torque/postgresql/adapter/oid/segment'\n\nrequire_relative '../lib/torque/postgresql/attributes/enum'\nrequire_relative '../lib/torque/postgresql/attributes/enum_set'\nrequire_relative '../lib/torque/postgresql/attributes/period'\nrequire_relative '../lib/torque/postgresql/attributes/full_text_search'\n\nrequire_relative '../lib/torque/postgresql/relation/auxiliary_statement'\nrequire_relative '../lib/torque/postgresql/relation/join_series'\nrequire_relative '../lib/torque/postgresql/relation/buckets'\n\nrequire_relative '../lib/torque/postgresql/versioned_commands'\n\nmodule Torque\n  module PostgreSQL\n    ActiveRecord::Base.belongs_to_many_required_by_default = false\n\n    Attributes::Enum.include_on(ActiveRecord::Base)\n    Attributes::EnumSet.include_on(ActiveRecord::Base)\n    Attributes::Period.include_on(ActiveRecord::Base)\n    Attributes::FullTextSearch.include_on(ActiveRecord::Base)\n\n    Relation.include(Relation::AuxiliaryStatement)\n    Relation.include(Relation::JoinSeries)\n    Relation.include(Relation::Buckets)\n\n    config.versioned_commands.enabled = true\n    ActiveRecord::Schema::Definition.include(Adapter::Definition)\n\n    ::Object.const_set('TorqueCTE', AuxiliaryStatement)\n    ::Object.const_set('TorqueRecursiveCTE', AuxiliaryStatement::Recursive)\n\n    config.enum.namespace = ::Object.const_set('Enum', Module.new)\n    config.enum.namespace.define_singleton_method(:const_missing) do 
|name|\n      Attributes::Enum.lookup(name)\n    end\n\n    config.enum.namespace.define_singleton_method(:sample) do |name|\n      Attributes::Enum.lookup(name).sample\n    end\n\n    ar_type = ActiveRecord::Type\n    ar_type.register(:enum,     Adapter::OID::Enum,     adapter: :postgresql)\n    ar_type.register(:enum_set, Adapter::OID::EnumSet,  adapter: :postgresql)\n\n    ar_type.register(:box,      Adapter::OID::Box,      adapter: :postgresql)\n    ar_type.register(:circle,   Adapter::OID::Circle,   adapter: :postgresql)\n    ar_type.register(:line,     Adapter::OID::Line,     adapter: :postgresql)\n    ar_type.register(:segment,  Adapter::OID::Segment,  adapter: :postgresql)\n\n    ar_type.register(:interval, Adapter::OID::Interval, adapter: :postgresql)\n\n    Arel.build_operations(config.arel.infix_operators)\n\n    ActiveRecord::Base.connection.torque_load_additional_types\n  end\nend\n"
  },
  {
    "path": "spec/mocks/cache_query.rb",
    "content": "module Mocks\n  module CacheQuery\n    def get_last_executed_query(&block)\n      cache = ActiveRecord::Base.connection.query_cache\n      cache.instance_variable_set(:@enabled, true)\n\n      map = cache.instance_variable_get(:@map)\n\n      block.call\n      result = map.keys.first\n\n      cache.instance_variable_set(:@enabled, false)\n      map.delete(result)\n\n      result\n    end\n\n    def get_query_with_binds(&block)\n      result = nil\n\n      original_method = ActiveRecord::Base.connection.method(:raw_execute)\n      original_method.receiver.define_singleton_method(:raw_execute) do |*args, **xargs, &block|\n        result ||= [args.first, args.third]\n        super(*args, **xargs, &block)\n      end\n\n      block.call\n      original_method.receiver.define_singleton_method(:raw_execute, &original_method.to_proc)\n\n      result\n    end\n  end\nend\n"
  },
  {
    "path": "spec/mocks/create_table.rb",
    "content": "module Mocks\n  module CreateTable\n    def mock_create_table\n      around do |example|\n        original_method = ActiveRecord::Base.connection.method(:log)\n        original_method.receiver.define_singleton_method(:log) do |sql, *, **, &block|\n          sql\n        end\n\n        example.run\n        original_method.receiver.define_singleton_method(:log, &original_method.to_proc)\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/models/activity.rb",
    "content": "class Activity < ActiveRecord::Base\n  belongs_to :author\nend\n"
  },
  {
    "path": "spec/models/activity_book.rb",
    "content": "require_relative 'activity'\n\nclass ActivityBook < Activity\nend\n"
  },
  {
    "path": "spec/models/activity_post/sample.rb",
    "content": "class ActivityPost < Activity\n  class Sample < ActivityPost\n  end\nend\n"
  },
  {
    "path": "spec/models/activity_post.rb",
    "content": "require_relative 'activity'\n\nclass ActivityPost < Activity\n  belongs_to :post\nend\n\nrequire_relative 'activity_post/sample'\n"
  },
  {
    "path": "spec/models/author.rb",
    "content": "class Author < ActiveRecord::Base\n  has_many :activities, -> { cast_records }\n  has_many :posts\nend\n"
  },
  {
    "path": "spec/models/author_journalist.rb",
    "content": "require_relative 'author'\n\nclass AuthorJournalist < Author\nend\n"
  },
  {
    "path": "spec/models/category.rb",
    "content": "class Category < ActiveRecord::Base\nend\n"
  },
  {
    "path": "spec/models/comment.rb",
    "content": "class Comment < ActiveRecord::Base\n  belongs_to :user\nend\n"
  },
  {
    "path": "spec/models/course.rb",
    "content": "class Course < ActiveRecord::Base\nend\n"
  },
  {
    "path": "spec/models/geometry.rb",
    "content": "class Geometry < ActiveRecord::Base\nend\n"
  },
  {
    "path": "spec/models/guest_comment.rb",
    "content": "require_relative 'comment'\n\nclass GuestComment < Comment\nend\n"
  },
  {
    "path": "spec/models/internal/user.rb",
    "content": "module Internal\n  class User < ActiveRecord::Base\n    self.schema = 'internal'\n  end\nend\n"
  },
  {
    "path": "spec/models/item.rb",
    "content": "class Item < ActiveRecord::Base\n  belongs_to_many :tags\nend\n"
  },
  {
    "path": "spec/models/post.rb",
    "content": "class Post < ActiveRecord::Base\n  belongs_to :author\n  belongs_to :activity\n\n  scope :test_scope, -> { where('1=1') }\nend\n"
  },
  {
    "path": "spec/models/question.rb",
    "content": "class Question < ActiveRecord::Base\n  self.implicit_order_column = 'created_at'\nend\n"
  },
  {
    "path": "spec/models/question_select.rb",
    "content": "require_relative 'question'\n\nclass QuestionSelect < Question\nend\n"
  },
  {
    "path": "spec/models/tag.rb",
    "content": "class Tag < ActiveRecord::Base\nend\n"
  },
  {
    "path": "spec/models/text.rb",
    "content": "class Text < ActiveRecord::Base\nend\n"
  },
  {
    "path": "spec/models/time_keeper.rb",
    "content": "class TimeKeeper < ActiveRecord::Base\nend\n"
  },
  {
    "path": "spec/models/user.rb",
    "content": "class User < ActiveRecord::Base\n  has_many :comments\n\n  auxiliary_statement :last_comment do |cte|\n    cte.query Comment.distinct_on(:user_id).order(:user_id, id: :desc)\n    cte.attributes id: :comment_id, content: :comment_content\n  end\nend\n"
  },
  {
    "path": "spec/models/video.rb",
    "content": "class Video < ActiveRecord::Base\nend\n"
  },
  {
    "path": "spec/schema.rb",
    "content": "# This file is auto-generated from the current state of the database. Instead\n# of editing this file, please use the migrations feature of Active Record to\n# incrementally modify your database, and then regenerate this schema definition.\n#\n# Note that this schema.rb definition is the authoritative source for your\n# database schema. If you need to create the application database on another\n# system, you should be using db:schema:load, not running all the migrations\n# from scratch. The latter is a flawed and unsustainable approach (the more migrations\n# you'll amass, the slower it'll run and the greater likelihood for issues).\n#\n# It's strongly recommended that you check this file into your version control system.\n\nversion = 7\n\nreturn if ActiveRecord::Migrator.current_version == version\nActiveRecord::Schema.define(version: version) do\n  self.verbose = false\n\n  # These are extensions that must be enabled in order to support this database\n  enable_extension \"pgcrypto\"\n  enable_extension \"plpgsql\"\n\n  # Custom schemas used in this database.\n  create_schema \"internal\", force: :cascade\n\n  # Custom types defined in this database.\n  # Note that some types may not work with other database engines. 
Be careful if changing database.\n  create_enum \"content_status\", [\"created\", \"draft\", \"published\", \"archived\"]\n  create_enum \"specialties\", [\"books\", \"movies\", \"plays\"]\n  create_enum \"roles\", [\"visitor\", \"assistant\", \"manager\", \"admin\"]\n  create_enum \"conflicts\", [\"valid\", \"invalid\", \"untrusted\"]\n  create_enum \"types\", [\"A\", \"B\", \"C\", \"D\"]\n\n  create_table \"geometries\", force: :cascade do |t|\n    t.point   \"point\"\n    t.line    \"line\"\n    t.lseg    \"lseg\"\n    t.box     \"box\"\n    t.path    \"closed_path\"\n    t.path    \"open_path\"\n    t.polygon \"polygon\"\n    t.circle  \"circle\"\n  end\n\n  create_table \"time_keepers\", force: :cascade do |t|\n    t.daterange \"available\"\n    t.tsrange   \"period\"\n    t.tstzrange \"tzperiod\"\n    t.interval  \"th\"\n  end\n\n  create_table \"tags\", force: :cascade do |t|\n    t.string \"name\"\n  end\n\n  create_table \"videos\", force: :cascade do |t|\n    t.bigint   \"tag_ids\", array: true\n    t.string   \"title\"\n    t.string   \"url\"\n    t.enum     \"type\", enum_type: :types\n    t.enum     \"conflicts\", enum_type: :conflicts, array: true\n    t.jsonb    \"metadata\"\n    # t.column   \"pieces\", :int4multirange\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n  end\n\n  create_table \"authors\", force: :cascade do |t|\n    t.string   \"name\"\n    t.string   \"type\"\n    t.enum     \"specialty\", enum_type: :specialties\n  end\n\n  create_table \"categories\", force: :cascade do |t|\n    t.integer  \"parent_id\"\n    t.string   \"title\"\n  end\n\n  create_table \"texts\", force: :cascade do |t|\n    t.integer  \"user_id\"\n    t.string   \"content\"\n    t.enum     \"conflict\", enum_type: :conflicts\n  end\n\n  create_table \"comments\", force: :cascade do |t|\n    t.integer \"user_id\", null: false\n    t.integer \"comment_id\"\n    t.integer \"video_id\"\n    t.text    \"content\", null: false\n   
 t.string  \"kind\"\n    t.index [\"user_id\"], name: \"index_comments_on_user_id\", using: :btree\n    t.index [\"comment_id\"], name: \"index_comments_on_comment_id\", using: :btree\n  end\n\n  create_table \"courses\", force: :cascade do |t|\n    t.integer         \"category_id\"\n    t.string          \"title\", null: false\n    t.interval        \"duration\"\n    t.enum            \"types\", enum_type: :types, array: true\n    t.search_language \"lang\", null: false, default: 'english'\n    t.search_vector   \"search_vector\", columns: :title, language: :lang\n    t.tsvector        \"unhandled\"\n    t.datetime        \"created_at\", null: false\n    t.datetime        \"updated_at\", null: false\n  end\n\n  create_table \"images\", force: :cascade, id: false do |t|\n    t.string \"file\"\n  end\n\n  create_table \"posts\", force: :cascade do |t|\n    t.integer       \"author_id\"\n    t.integer       \"activity_id\"\n    t.string        \"title\"\n    t.text          \"content\"\n    t.enum          \"status\", enum_type: :content_status\n    t.search_vector \"search_vector\", columns: %i[title content]\n    t.index [\"author_id\"], name: \"index_posts_on_author_id\", using: :btree\n  end\n\n  create_table \"items\", force: :cascade do |t|\n    t.string   \"name\"\n    t.bigint   \"tag_ids\", array: true, default: \"{1}\"\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n  end\n\n  create_table \"users\", force: :cascade do |t|\n    t.string   \"name\", null: false\n    t.enum     \"role\", enum_type: :roles, default: :visitor\n    t.integer  \"age\"\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n  end\n\n  create_table \"users\", schema: \"internal\", force: :cascade do |t|\n    t.string   \"email\"\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n    t.index [\"email\"], name: \"index_internal_users_on_email\", unique: true\n  end\n\n  
create_table \"activities\", force: :cascade do |t|\n    t.integer  \"author_id\"\n    t.string   \"title\"\n    t.boolean  \"active\"\n    t.enum     \"kind\", enum_type: :types\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n  end\n\n  create_table \"questions\", id: :uuid, default: -> { \"gen_random_uuid()\" }, force: :cascade do |t|\n    t.string   \"title\"\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n  end\n\n  create_table \"activity_books\", force: :cascade, inherits: :activities do |t|\n    t.text     \"description\"\n    t.string   \"url\"\n    t.boolean  \"activated\"\n  end\n\n  create_table \"activity_posts\", force: :cascade, inherits: [:activities, :images] do |t|\n    t.integer  \"post_id\"\n    t.string   \"url\"\n    t.integer  \"activated\"\n  end\n\n  create_table \"activity_post_samples\", force: :cascade, inherits: :activity_posts\n\n  create_table \"question_selects\", force: :cascade, inherits: :questions do |t|\n    t.string  \"options\", array: true\n  end\n\n  # create_table \"activity_blanks\", force: :cascade, inherits: :activities\n\n  # create_table \"activity_images\", force: :cascade, inherits: [:activities, :images]\n\n  add_foreign_key \"posts\", \"authors\"\nend\n\nActiveRecord::Base.connection.schema_cache.clear!\n"
  },
  {
    "path": "spec/spec_helper.rb",
    "content": "require 'torque-postgresql'\nrequire 'database_cleaner'\nrequire 'factory_bot'\nrequire 'dotenv'\nrequire 'faker'\nrequire 'rspec'\n\nbegin\n  require 'debug/prelude'\nrescue LoadError\n  # No debugger available, skip\nend\n\nDotenv.load\n\nActiveRecord::Base.establish_connection(ENV['DATABASE_URL'] || {\n  adapter: 'postgresql',\n  username: 'travis',\n  port: 5433,\n})\n\ncache = ActiveRecord::Base.connection.schema_cache\ncleaner = ->() do\n  cache.instance_variable_set(:@inheritance_loaded, false)\n  cache.instance_variable_set(:@inheritance_dependencies, {})\n  cache.instance_variable_set(:@inheritance_associations, {})\nend\n\n# Load all the files that are optional and managed by Railtie\nrequire_relative 'initialize'\n\n# This needs to come after loading all optional features\nrequire_relative 'schema'\nDir.glob(File.join('spec', '{models,factories,mocks}', '**', '*.rb')) do |file|\n  require file[5..-4]\nend\n\ncleaner.call\nI18n.load_path << Pathname.pwd.join('spec', 'en.yml')\nRSpec.configure do |config|\n  config.extend Mocks::CreateTable\n  config.include Mocks::CacheQuery\n  config.include FactoryBot::Syntax::Methods\n\n  config.formatter = :documentation\n  config.color     = true\n  config.tty       = true\n\n  # Handles acton before rspec initialize\n  config.before(:suite) do\n    Torque::PostgreSQL.config.schemas.whitelist << 'internal'\n    ActiveSupport::Deprecation.try(:silenced=, true)\n    DatabaseCleaner.clean_with(:truncation)\n  end\n\n  config.before(:each) do\n    DatabaseCleaner.strategy = :transaction\n  end\n\n  config.before(:each) do\n    DatabaseCleaner.start\n  end\n\n  config.after(:each) do\n    DatabaseCleaner.clean\n  end\n\n  config.before(:each) do\n    cleaner.call\n  end\nend\n"
  },
  {
    "path": "spec/tests/arel_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'Arel' do\n  context 'on inflix operation' do\n    let(:collector) { ::Arel::Collectors::SQLString }\n    let(:attribute) { ::Arel::Table.new('a')['sample'] }\n    let(:conn) { ActiveRecord::Base.connection }\n    let(:visitor) { ::Arel::Visitors::PostgreSQL.new(conn) }\n\n    [\n      [:overlaps,            [1, 2],                       \"ARRAY[1, 2]\"],\n      [:contains,            [3, 4],                       \"ARRAY[3, 4]\"],\n      [:contained_by,        [5, 6],                       \"ARRAY[5, 6]\"],\n      [:has_key,             ::Arel.sql(\"'a'\"),            \"'a'\"],\n      [:has_all_keys,        ['b', 'c'],                   \"ARRAY['b', 'c']\"],\n      [:has_any_keys,        ['d', 'e'],                   \"ARRAY['d', 'e']\"],\n\n      [:strictly_left,       ::Arel.sql('numrange(1, 2)'), 'numrange(1, 2)'],\n      [:strictly_right,      ::Arel.sql('numrange(3, 4)'), 'numrange(3, 4)'],\n      [:doesnt_right_extend, ::Arel.sql('numrange(5, 6)'), 'numrange(5, 6)'],\n      [:doesnt_left_extend,  ::Arel.sql('numrange(7, 8)'), 'numrange(7, 8)'],\n      [:adjacent_to,         ::Arel.sql('numrange(9, 0)'), 'numrange(9, 0)'],\n    ].each do |(operation, value, quoted_value)|\n      klass_name = operation.to_s.camelize\n\n      context \"##{operation}\" do\n        let(:operator) { instance.operator }\n        let(:instance) do\n          attribute.public_send(operation, value.is_a?(Array) ? 
::Arel.array(value) : value)\n        end\n\n        context 'for attribute' do\n          let(:klass) { ::Arel::Nodes.const_get(klass_name) }\n\n          it \"returns a new #{klass_name}\" do\n            expect(instance).to be_a(klass)\n          end\n        end\n\n        context 'for visitor' do\n          let(:result) { visitor.accept(instance, collector.new).value }\n\n          it 'returns a formatted operation' do\n            expect(result).to be_eql(\"\\\"a\\\".\\\"sample\\\" #{operator} #{quoted_value}\")\n          end\n        end\n      end\n    end\n  end\n\n  context 'on default value' do\n    let(:connection) { ActiveRecord::Base.connection }\n\n    after { Author.reset_column_information }\n\n    it 'does not break the change column default value method' do\n      connection.add_column(:authors, :enabled, :boolean)\n      expect { connection.change_column_default(:authors, :enabled, { from: nil, to: true }) }.not_to raise_error\n      expect(Author.columns_hash['enabled'].default).to eq('true')\n    end\n\n    it 'does not break jsonb' do\n      expect { connection.add_column(:authors, :profile, :jsonb, default: []) }.not_to raise_error\n      expect(Author.columns_hash['profile'].default).to eq('[]')\n\n      condition = Author.arel_table['profile'].is_distinct_from([])\n      expect(Author.where(condition).to_sql).to eq(<<~SQL.squish)\n        SELECT \"authors\".* FROM \"authors\" WHERE \"authors\".\"profile\" IS DISTINCT FROM '[]'\n      SQL\n    end\n\n    it 'works properly when column is an array' do\n      expect { connection.add_column(:authors, :tag_ids, :bigint, array: true, default: []) }.not_to raise_error\n      expect(Author.new.tag_ids).to eq([])\n    end\n\n    it 'works with an array with enum values for a new enum' do\n      value = ['a', 'b']\n\n      expect do\n        connection.create_enum(:samples, %i[a b c d])\n        connection.add_column(:authors, :samples, :enum, enum_type: :samples, array: true, default: value)\n     
 end.not_to raise_error\n\n      expect(Author.new.samples).to eq(value)\n    end\n\n    it 'works with an array with enum values for an existing enum' do\n      value = ['visitor', 'assistant']\n      expect { connection.add_column(:authors, :roles, :enum, enum_type: :roles, array: true, default: value) }.not_to raise_error\n      expect(Author.new.roles).to eq(value)\n    end\n\n    it 'works with multi dimentional array' do\n      value = [['1', '2'], ['3', '4']]\n      expect { connection.add_column(:authors, :tag_ids, :string, array: true, default: value) }.not_to raise_error\n      expect(Author.new.tag_ids).to eq(value)\n    end\n\n    it 'works with change column default value' do\n      value = ['2', '3']\n      connection.add_column(:authors, :tag_ids, :string, array: true)\n      expect { connection.change_column_default(:authors, :tag_ids, { from: nil, to: value }) }.not_to raise_error\n      expect(Author.new.tag_ids).to eq(value)\n    end\n  end\n\n  context 'on cast' do\n    it 'provides an array method' do\n      sample1 = ::Arel.array(1, 2, 3, 4)\n      sample2 = ::Arel.array([1, 2, 3, 4])\n      sample3 = ::Arel.array(1, 2, 3, 4, cast: 'bigint')\n      sample4 = ::Arel.array([1, 2, 3, 4], [5, 6, 7, 8], cast: 'integer')\n\n      expect(sample1.to_sql).to be_eql('ARRAY[1, 2, 3, 4]')\n      expect(sample2.to_sql).to be_eql('ARRAY[1, 2, 3, 4]')\n      expect(sample3.to_sql).to be_eql('ARRAY[1, 2, 3, 4]::bigint[]')\n      expect(sample4.to_sql).to be_eql('ARRAY[ARRAY[1, 2, 3, 4], ARRAY[5, 6, 7, 8]]::integer[]')\n    end\n\n    it 'provides a cast method' do\n      attribute = ::Arel::Table.new('a')['sample']\n      quoted = ::Arel::Nodes::build_quoted([1])\n      casted = ::Arel::Nodes::build_quoted(1, attribute)\n\n      expect(attribute.pg_cast('text').to_sql).to be_eql('\"a\".\"sample\"::text')\n      expect(quoted.pg_cast('bigint', true).to_sql).to be_eql('ARRAY[1]::bigint[]')\n      expect(casted.pg_cast('string').to_sql).to 
be_eql(\"1::string\")\n    end\n\n    it 'provides proper support to cast methods' do\n      attribute = ::Arel::Table.new('a')['sample']\n      quoted = ::Arel::Nodes::build_quoted([1])\n      casted = ::Arel::Nodes::build_quoted(1)\n\n      expect(attribute.cast('text').to_sql).to be_eql('\"a\".\"sample\"::text')\n      expect(quoted.cast('bigint', true).to_sql).to be_eql('ARRAY[1]::bigint[]')\n\n      changed_result = ActiveRecord.gem_version >= Gem::Version.new('8.0.2')\n      changed_result = changed_result ? 'CAST(1 AS string)' : '1::string'\n      expect(casted.pg_cast('string').to_sql).to be_eql(\"1::string\")\n    end\n\n    it 'properly works combined on a query' do\n      condition = Video.arel_table[:tag_ids].contains([1,2]).cast(:bigint, :array)\n      query = Video.all.where(condition).to_sql\n\n      expect(query).to include('WHERE \"videos\".\"tag_ids\" @> ARRAY[1, 2]::bigint[]')\n\n      condition = QuestionSelect.arel_table[:options].overlaps(%w[a b]).cast(:string, :array)\n      query = QuestionSelect.all.where(condition).to_sql\n\n      expect(query).to include('\"options\" && ARRAY[\\'a\\', \\'b\\']::string[]')\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/auxiliary_statement_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'AuxiliaryStatement' do\n  before :each do\n    User.auxiliary_statements_list = {}\n  end\n\n  context 'on relation' do\n    let(:klass) { User }\n    let(:true_value) { 'TRUE' }\n    subject { klass.unscoped }\n\n    it 'has its method' do\n      expect(subject).to respond_to(:with)\n    end\n\n    it 'can perform simple queries' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes content: :comment_content\n      end\n\n      result = 'WITH \"comments\" AS'\n      result << ' (SELECT \"comments\".\"content\" AS comment_content, \"comments\".\"user_id\" FROM \"comments\")'\n      result << ' SELECT \"users\".*, \"comments\".\"comment_content\" FROM \"users\"'\n      result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n      expect(subject.with(:comments).arel.to_sql).to eql(result)\n    end\n\n    it 'can perform more complex queries' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.distinct_on(:user_id).order(:user_id, id: :desc)\n        cte.attributes content: :last_comment\n      end\n\n      result = 'WITH \"comments\" AS (SELECT DISTINCT ON ( \"comments\".\"user_id\" )'\n      result << ' \"comments\".\"content\" AS last_comment, \"comments\".\"user_id\"'\n      result << ' FROM \"comments\" ORDER BY \"comments\".\"user_id\" ASC,'\n      result << ' \"comments\".\"id\" DESC) SELECT \"users\".*,'\n      result << ' \"comments\".\"last_comment\" FROM \"users\" INNER JOIN \"comments\"'\n      result << ' ON \"comments\".\"user_id\" = \"users\".\"id\"'\n      expect(subject.with(:comments).arel.to_sql).to eql(result)\n    end\n\n    it 'accepts extra select columns' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes content: :comment_content\n      end\n\n      result = 'WITH \"comments\" AS'\n      
result << ' (SELECT \"comments\".\"content\" AS comment_content, \"comments\".\"slug\" AS comment_slug, \"comments\".\"user_id\" FROM \"comments\")'\n      result << ' SELECT \"users\".*, \"comments\".\"comment_content\", \"comments\".\"comment_slug\" FROM \"users\"'\n      result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n      expect(subject.with(:comments, select: {slug: :comment_slug}).arel.to_sql).to eql(result)\n    end\n\n    it 'accepts extra join columns' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes content: :comment_content\n      end\n\n      result = 'WITH \"comments\" AS'\n      result << ' (SELECT \"comments\".\"content\" AS comment_content, \"comments\".\"user_id\", \"comments\".\"active\" FROM \"comments\")'\n      result << ' SELECT \"users\".*, \"comments\".\"comment_content\" FROM \"users\"'\n      result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\" AND \"comments\".\"active\" = \"users\".\"active\"'\n      expect(subject.with(:comments, join: {active: :active}).arel.to_sql).to eql(result)\n    end\n\n    it 'accepts extra conditions' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes content: :comment_content\n      end\n\n      result = 'WITH \"comments\" AS'\n      result << ' (SELECT \"comments\".\"content\" AS comment_content, \"comments\".\"user_id\"'\n      result << ' FROM \"comments\" WHERE \"comments\".\"active\" = $1)'\n      result << ' SELECT \"users\".*, \"comments\".\"comment_content\" FROM \"users\"'\n      result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n      expect(subject.with(:comments, where: {active: true}).arel.to_sql).to eql(result)\n    end\n\n    it 'accepts scopes from both sides' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.where(id: 
1).all\n        cte.attributes content: :comment_content\n      end\n\n      query = subject.where(id: 2).with(:comments)\n\n      result = 'WITH \"comments\" AS'\n      result << ' (SELECT \"comments\".\"content\" AS comment_content, \"comments\".\"user_id\" FROM \"comments\"'\n      result << ' WHERE \"comments\".\"id\" = $1)'\n      result << ' SELECT \"users\".*, \"comments\".\"comment_content\" FROM \"users\"'\n      result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n      result << ' WHERE \"users\".\"id\" = $2'\n\n      expect(query.arel.to_sql).to eql(result)\n      expect(query.send(:bound_attributes).map(&:value_before_type_cast)).to eql([1, 2])\n    end\n\n    it 'accepts string as attributes' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes sql('MAX(id)') => :comment_id\n      end\n\n      result = 'WITH \"comments\" AS'\n      result << ' (SELECT MAX(id) AS comment_id, \"comments\".\"user_id\" FROM \"comments\")'\n      result << ' SELECT \"users\".*, \"comments\".\"comment_id\" FROM \"users\"'\n      result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n      expect(subject.with(:comments).arel.to_sql).to eql(result)\n    end\n\n    it 'accepts complex string as attributes' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes sql('ROW_NUMBER() OVER (PARTITION BY ORDER BY \"comments\".\"id\")') => :comment_id\n      end\n\n      result = 'WITH \"comments\" AS'\n      result << ' (SELECT ROW_NUMBER() OVER (PARTITION BY ORDER BY \"comments\".\"id\") AS comment_id, \"comments\".\"user_id\" FROM \"comments\")'\n      result << ' SELECT \"users\".*, \"comments\".\"comment_id\" FROM \"users\"'\n      result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n      expect(subject.with(:comments).arel.to_sql).to eql(result)\n    end\n\n    
it 'accepts arel attribute as attributes' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes col(:id).minimum => :comment_id\n      end\n\n      result = 'WITH \"comments\" AS'\n      result << ' (SELECT MIN(\"comments\".\"id\") AS comment_id, \"comments\".\"user_id\" FROM \"comments\")'\n      result << ' SELECT \"users\".*, \"comments\".\"comment_id\" FROM \"users\"'\n      result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n      expect(subject.with(:comments).arel.to_sql).to eql(result)\n    end\n\n    it 'accepts custom join properties' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes content: :comment_content\n        cte.join name: :id, 'a.col' => :col\n      end\n\n      result = 'WITH \"comments\" AS (SELECT \"comments\".\"content\" AS comment_content,'\n      result << ' \"comments\".\"id\", \"comments\".\"col\" FROM \"comments\") SELECT \"users\".*,'\n      result << ' \"comments\".\"comment_content\" FROM \"users\" INNER JOIN \"comments\"'\n      result << ' ON \"comments\".\"id\" = \"users\".\"name\" AND \"comments\".\"col\" = \"a\".\"col\"'\n      expect(subject.with(:comments).arel.to_sql).to eql(result)\n    end\n\n    it 'can perform other types of joins' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes content: :comment_content\n        cte.join_type :left\n      end\n\n      result = 'WITH \"comments\" AS (SELECT \"comments\".\"content\" AS comment_content,'\n      result << ' \"comments\".\"user_id\" FROM \"comments\") SELECT \"users\".*,'\n      result << ' \"comments\".\"comment_content\" FROM \"users\" LEFT OUTER JOIN \"comments\"'\n      result << ' ON \"comments\".\"user_id\" = \"users\".\"id\"'\n      expect(subject.with(:comments).arel.to_sql).to eql(result)\n    end\n\n    it 'can manually define the 
association' do\n      klass.has_many :sample_comment, class_name: 'Comment', foreign_key: :a_user_id\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.through :sample_comment\n        cte.attributes content: :sample_content\n      end\n\n      result = 'WITH \"comments\" AS'\n      result << ' (SELECT \"comments\".\"content\" AS sample_content, \"comments\".\"a_user_id\" FROM \"comments\")'\n      result << ' SELECT \"users\".*, \"comments\".\"sample_content\" FROM \"users\"'\n      result << ' INNER JOIN \"comments\" ON \"comments\".\"a_user_id\" = \"users\".\"id\"'\n      expect(subject.with(:comments).arel.to_sql).to eql(result)\n    end\n\n    it 'accepts complex scopes from dependencies' do\n      klass.send(:auxiliary_statement, :comments1) do |cte|\n        cte.query Comment.where(id: 1).all\n        cte.attributes content: :comment_content1\n      end\n\n      klass.send(:auxiliary_statement, :comments2) do |cte|\n        cte.requires :comments1\n        cte.query Comment.where(id: 2).all\n        cte.attributes content: :comment_content2\n      end\n\n      query = subject.where(id: 3).with(:comments2)\n\n      result = 'WITH '\n      result << '\"comments1\" AS (SELECT \"comments\".\"content\" AS comment_content1, \"comments\".\"user_id\" FROM \"comments\" WHERE \"comments\".\"id\" = $1), '\n      result << '\"comments2\" AS (SELECT \"comments\".\"content\" AS comment_content2, \"comments\".\"user_id\" FROM \"comments\" WHERE \"comments\".\"id\" = $2)'\n      result << ' SELECT \"users\".*, \"comments1\".\"comment_content1\", \"comments2\".\"comment_content2\" FROM \"users\"'\n      result << ' INNER JOIN \"comments1\" ON \"comments1\".\"user_id\" = \"users\".\"id\"'\n      result << ' INNER JOIN \"comments2\" ON \"comments2\".\"user_id\" = \"users\".\"id\"'\n      result << ' WHERE \"users\".\"id\" = $3'\n\n      expect(query.arel.to_sql).to eql(result)\n      
expect(query.send(:bound_attributes).map(&:value_before_type_cast)).to eql([1, 2, 3])\n    end\n\n    context 'with dependency' do\n      before :each do\n        klass.send(:auxiliary_statement, :comments1) do |cte|\n          cte.query Comment.all\n          cte.attributes content: :comment_content1\n        end\n\n        klass.send(:auxiliary_statement, :comments2) do |cte|\n          cte.requires :comments1\n          cte.query Comment.all\n          cte.attributes content: :comment_content2\n        end\n      end\n\n      it 'can require another statement as dependency' do\n        result = 'WITH '\n        result << '\"comments1\" AS (SELECT \"comments\".\"content\" AS comment_content1, \"comments\".\"user_id\" FROM \"comments\"), '\n        result << '\"comments2\" AS (SELECT \"comments\".\"content\" AS comment_content2, \"comments\".\"user_id\" FROM \"comments\")'\n        result << ' SELECT \"users\".*, \"comments1\".\"comment_content1\", \"comments2\".\"comment_content2\" FROM \"users\"'\n        result << ' INNER JOIN \"comments1\" ON \"comments1\".\"user_id\" = \"users\".\"id\"'\n        result << ' INNER JOIN \"comments2\" ON \"comments2\".\"user_id\" = \"users\".\"id\"'\n        expect(subject.with(:comments2).arel.to_sql).to eql(result)\n      end\n\n      it 'can use an already set dependent' do\n        result = 'WITH '\n        result << '\"comments1\" AS (SELECT \"comments\".\"content\" AS comment_content1, \"comments\".\"user_id\" FROM \"comments\"), '\n        result << '\"comments2\" AS (SELECT \"comments\".\"content\" AS comment_content2, \"comments\".\"user_id\" FROM \"comments\")'\n        result << ' SELECT \"users\".*, \"comments1\".\"comment_content1\", \"comments2\".\"comment_content2\" FROM \"users\"'\n        result << ' INNER JOIN \"comments1\" ON \"comments1\".\"user_id\" = \"users\".\"id\"'\n        result << ' INNER JOIN \"comments2\" ON \"comments2\".\"user_id\" = \"users\".\"id\"'\n        
expect(subject.with(:comments1, :comments2).arel.to_sql).to eql(result)\n      end\n\n      it 'raises an error if the dependent does not exist' do\n        klass.send(:auxiliary_statement, :comments2) do |cte|\n          cte.requires :comments3\n          cte.query Comment.all\n          cte.attributes content: :comment_content2\n        end\n        expect{ subject.with(:comments2).arel.to_sql }.to raise_error(ArgumentError)\n      end\n    end\n\n    context 'query as string' do\n      it 'performs correctly' do\n        klass.send(:auxiliary_statement, :comments) do |cte|\n          cte.query :comments, 'SELECT * FROM comments'\n          cte.attributes content: :comment\n          cte.join id: :user_id\n        end\n\n        result = 'WITH \"comments\" AS (SELECT * FROM comments)'\n        result << ' SELECT \"users\".*, \"comments\".\"comment\" FROM \"users\"'\n        result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n        expect(subject.with(:comments).arel.to_sql).to eql(result)\n      end\n\n      it 'accepts arguments to format the query' do\n        klass.send(:auxiliary_statement, :comments) do |cte|\n          cte.query :comments, 'SELECT * FROM comments WHERE active = %{active}'\n          cte.attributes content: :comment\n          cte.join id: :user_id\n        end\n\n        result = \"WITH \\\"comments\\\" AS (SELECT * FROM comments WHERE active = #{true_value})\"\n        result << ' SELECT \"users\".*, \"comments\".\"comment\" FROM \"users\"'\n        result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n        expect(subject.with(:comments, args: {active: true}).arel.to_sql).to eql(result)\n      end\n\n      it 'raises an error when join columns are not given' do\n        klass.send(:auxiliary_statement, :comments) do |cte|\n          cte.query :comments, 'SELECT * FROM comments'\n          cte.attributes content: :comment\n        end\n\n        expect{ 
subject.with(:comments).arel.to_sql }.to raise_error(ArgumentError, /join columns/)\n      end\n\n      it 'not raises an error when not given the table name as first argument' do\n        klass.send(:auxiliary_statement, :comments) do |cte|\n          cte.query 'SELECT * FROM comments'\n          cte.attributes content: :comment\n          cte.join id: :user_id\n        end\n\n        expect{ subject.with(:comments).arel.to_sql }.not_to raise_error\n      end\n    end\n\n    context 'query as proc' do\n      it 'performs correctly for result as relation' do\n        klass.send(:auxiliary_statement, :comments) do |cte|\n          cte.query :comments, -> { Comment.all }\n          cte.attributes content: :comment\n          cte.join id: :user_id\n        end\n\n        result = 'WITH \"comments\" AS'\n        result << ' (SELECT \"comments\".\"content\" AS comment, \"comments\".\"user_id\" FROM \"comments\")'\n        result << ' SELECT \"users\".*, \"comments\".\"comment\" FROM \"users\"'\n        result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n        expect(subject.with(:comments).arel.to_sql).to eql(result)\n      end\n\n      it 'performs correctly for anything that has a call method' do\n        obj = Struct.new(:call, :arity).new('SELECT * FROM comments', 0)\n        klass.send(:auxiliary_statement, :comments) do |cte|\n          cte.query :comments, obj\n          cte.attributes content: :comment\n          cte.join id: :user_id\n        end\n\n        result = 'WITH \"comments\" AS (SELECT * FROM comments)'\n        result << ' SELECT \"users\".*, \"comments\".\"comment\" FROM \"users\"'\n        result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n        expect(subject.with(:comments).arel.to_sql).to eql(result)\n      end\n\n      it 'performs correctly for result as string' do\n        klass.send(:auxiliary_statement, :comments) do |cte|\n          cte.query :comments, -> { 'SELECT * 
FROM comments' }\n          cte.attributes content: :comment\n          cte.join id: :user_id\n        end\n\n        result = 'WITH \"comments\" AS (SELECT * FROM comments)'\n        result << ' SELECT \"users\".*, \"comments\".\"comment\" FROM \"users\"'\n        result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n        expect(subject.with(:comments).arel.to_sql).to eql(result)\n      end\n\n      it 'performs correctly when the proc requires arguments' do\n        klass.send(:auxiliary_statement, :comments) do |cte|\n          cte.query :comments, -> (args) { Comment.where(id: args.id) }\n          cte.attributes content: :comment\n          cte.join id: :user_id\n        end\n\n        query = subject.with(:comments, args: {id: 1})\n\n        result = 'WITH \"comments\" AS'\n        result << ' (SELECT \"comments\".\"content\" AS comment, \"comments\".\"user_id\"'\n        result << ' FROM \"comments\" WHERE \"comments\".\"id\" = $1)'\n        result << ' SELECT \"users\".*, \"comments\".\"comment\" FROM \"users\"'\n        result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n\n        expect(query.arel.to_sql).to eql(result)\n        expect(query.send(:bound_attributes).map(&:value_before_type_cast)).to eql([1])\n      end\n\n      it 'raises an error when join columns are not given' do\n        klass.send(:auxiliary_statement, :comments) do |cte|\n          cte.query :comments, -> { Author.all }\n          cte.attributes content: :comment\n        end\n\n        expect{ subject.with(:comments).arel.to_sql }.to raise_error(ArgumentError, /join columns/)\n      end\n\n      it 'not raises an error when not given the table name as first argument' do\n        klass.send(:auxiliary_statement, :comments) do |cte|\n          cte.query -> { Comment.all }\n          cte.attributes content: :comment\n          cte.join id: :user_id\n        end\n\n        expect{ subject.with(:comments).arel.to_sql 
}.not_to raise_error\n      end\n\n      it 'raises an error when the result of the proc is an invalid type' do\n        klass.send(:auxiliary_statement, :comments) do |cte|\n          cte.query :comments, -> { false }\n          cte.attributes content: :comment\n          cte.join id: :user_id\n        end\n\n        expect{ subject.with(:comments).arel.to_sql }.to raise_error(ArgumentError, /query objects/)\n      end\n    end\n\n    context 'with inheritance' do\n      let(:base) { Activity }\n      let(:klass) { ActivityBook }\n\n      it 'accepts ancestors auxiliary statements' do\n        base.send(:auxiliary_statement, :authors) do |cte|\n          cte.query Author.all\n          cte.attributes name: :author_name\n          cte.join author_id: :id\n        end\n\n        result = 'WITH \"authors\" AS'\n        result << ' (SELECT \"authors\".\"name\" AS author_name, \"authors\".\"id\" FROM \"authors\")'\n        result << ' SELECT \"activity_books\".*, \"authors\".\"author_name\" FROM \"activity_books\"'\n        result << ' INNER JOIN \"authors\" ON \"authors\".\"id\" = \"activity_books\".\"author_id\"'\n        expect(subject.with(:authors).arel.to_sql).to eql(result)\n      end\n\n      it 'can replace ancestors auxiliary statements' do\n        base.send(:auxiliary_statement, :authors) do |cte|\n          cte.query Author.all\n          cte.attributes name: :author_name\n          cte.join author_id: :id\n        end\n\n        klass.send(:auxiliary_statement, :authors) do |cte|\n          cte.query Author.all\n          cte.attributes type: :author_type\n          cte.join author_id: :id\n        end\n\n        result = 'WITH \"authors\" AS'\n        result << ' (SELECT \"authors\".\"type\" AS author_type, \"authors\".\"id\" FROM \"authors\")'\n        result << ' SELECT \"activity_books\".*, \"authors\".\"author_type\" FROM \"activity_books\"'\n        result << ' INNER JOIN \"authors\" ON \"authors\".\"id\" = \"activity_books\".\"author_id\"'\n        
expect(subject.with(:authors).arel.to_sql).to eql(result)\n      end\n\n      it 'raises an error when no class has the auxiliary statement' do\n        expect{ subject.with(:comments).arel.to_sql }.to raise_error(ArgumentError)\n      end\n    end\n\n    context 'recursive' do\n      let(:klass) { Course }\n\n      it 'correctly build a recursive cte' do\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n          cte.query Category.all\n          cte.join id: :parent_id\n        end\n\n        result = 'WITH RECURSIVE \"all_categories\" AS ('\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" IS NULL'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\", \"all_categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" = \"all_categories\".\"id\"'\n        result << ' ) SELECT \"courses\".* FROM \"courses\" INNER JOIN \"all_categories\"'\n        result << ' ON \"all_categories\".\"parent_id\" = \"courses\".\"id\"'\n        expect(subject.with(:all_categories).arel.to_sql).to eql(result)\n      end\n\n      it 'allows connect to be set to something different using a single value' do\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n          cte.query Category.all\n          cte.join id: :parent_id\n          cte.connect :name\n        end\n\n        result = 'WITH RECURSIVE \"all_categories\" AS ('\n        result << ' SELECT \"categories\".\"name\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"parent_name\" IS NULL'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"name\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\", \"all_categories\"'\n        result << 
' WHERE \"categories\".\"parent_name\" = \"all_categories\".\"name\"'\n        result << ' ) SELECT \"courses\".* FROM \"courses\" INNER JOIN \"all_categories\"'\n        result << ' ON \"all_categories\".\"parent_id\" = \"courses\".\"id\"'\n        expect(subject.with(:all_categories).arel.to_sql).to eql(result)\n      end\n\n      it 'allows a complete different set of connect' do\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n          cte.query Category.all\n          cte.join id: :parent_id\n          cte.connect left: :right\n        end\n\n        result = 'WITH RECURSIVE \"all_categories\" AS ('\n        result << ' SELECT \"categories\".\"left\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"right\" IS NULL'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"left\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\", \"all_categories\"'\n        result << ' WHERE \"categories\".\"right\" = \"all_categories\".\"left\"'\n        result << ' ) SELECT \"courses\".* FROM \"courses\" INNER JOIN \"all_categories\"'\n        result << ' ON \"all_categories\".\"parent_id\" = \"courses\".\"id\"'\n        expect(subject.with(:all_categories).arel.to_sql).to eql(result)\n      end\n\n      it 'allows using an union all' do\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n          cte.query Category.all\n          cte.join id: :parent_id\n          cte.union_all!\n        end\n\n        result = 'WITH RECURSIVE \"all_categories\" AS ('\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" IS NULL'\n        result << ' UNION ALL'\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\", \"all_categories\"'\n     
   result << ' WHERE \"categories\".\"parent_id\" = \"all_categories\".\"id\"'\n        result << ' ) SELECT \"courses\".* FROM \"courses\" INNER JOIN \"all_categories\"'\n        result << ' ON \"all_categories\".\"parent_id\" = \"courses\".\"id\"'\n        expect(subject.with(:all_categories).arel.to_sql).to eql(result)\n      end\n\n      it 'allows having a complete different initiator' do\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n          cte.query Category.where(parent_id: 5)\n          cte.join id: :parent_id\n        end\n\n        result = 'WITH RECURSIVE \"all_categories\" AS ('\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" = $1'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\", \"all_categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" = \"all_categories\".\"id\"'\n        result << ' ) SELECT \"courses\".* FROM \"courses\" INNER JOIN \"all_categories\"'\n        result << ' ON \"all_categories\".\"parent_id\" = \"courses\".\"id\"'\n        expect(subject.with(:all_categories).arel.to_sql).to eql(result)\n      end\n\n      it 'can process the depth of the query' do\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n          cte.query Category.all\n          cte.join id: :parent_id\n          cte.with_depth\n        end\n\n        result = 'WITH RECURSIVE \"all_categories\" AS ('\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\", 0 AS depth'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" IS NULL'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\", (\"all_categories\".\"depth\" + 1) AS depth'\n        result 
<< ' FROM \"categories\", \"all_categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" = \"all_categories\".\"id\"'\n        result << ' ) SELECT \"courses\".* FROM \"courses\" INNER JOIN \"all_categories\"'\n        result << ' ON \"all_categories\".\"parent_id\" = \"courses\".\"id\"'\n        expect(subject.with(:all_categories).arel.to_sql).to eql(result)\n      end\n\n      it 'can process and expose the depth of the query' do\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n          cte.query Category.all\n          cte.join id: :parent_id\n          cte.with_depth 'd', start: 10, as: :category_depth\n        end\n\n        result = 'WITH RECURSIVE \"all_categories\" AS ('\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\", 10 AS d'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" IS NULL'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\", (\"all_categories\".\"d\" + 1) AS d'\n        result << ' FROM \"categories\", \"all_categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" = \"all_categories\".\"id\"'\n        result << ' ) SELECT \"courses\".*, \"all_categories\".\"d\" AS category_depth FROM \"courses\" INNER JOIN \"all_categories\"'\n        result << ' ON \"all_categories\".\"parent_id\" = \"courses\".\"id\"'\n        expect(subject.with(:all_categories).arel.to_sql).to eql(result)\n      end\n\n      it 'can process the path of the query' do\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n          cte.query Category.all\n          cte.join id: :parent_id\n          cte.with_path\n        end\n\n        result = 'WITH RECURSIVE \"all_categories\" AS ('\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\", ARRAY[\"categories\".\"id\"]::varchar[] AS path'\n        result << ' FROM \"categories\"'\n       
 result << ' WHERE \"categories\".\"parent_id\" IS NULL'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\", ARRAY_APPEND(\"all_categories\".\"path\", \"categories\".\"id\"::varchar) AS path'\n        result << ' FROM \"categories\", \"all_categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" = \"all_categories\".\"id\"'\n        result << ' ) SELECT \"courses\".* FROM \"courses\" INNER JOIN \"all_categories\"'\n        result << ' ON \"all_categories\".\"parent_id\" = \"courses\".\"id\"'\n        expect(subject.with(:all_categories).arel.to_sql).to eql(result)\n      end\n\n      it 'can process and expose the path of the query' do\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n          cte.query Category.all\n          cte.join id: :parent_id\n          cte.with_path 'p', source: :name, as: :category_path\n        end\n\n        result = 'WITH RECURSIVE \"all_categories\" AS ('\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\", ARRAY[\"categories\".\"name\"]::varchar[] AS p'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" IS NULL'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\", ARRAY_APPEND(\"all_categories\".\"p\", \"categories\".\"name\"::varchar) AS p'\n        result << ' FROM \"categories\", \"all_categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" = \"all_categories\".\"id\"'\n        result << ' ) SELECT \"courses\".*, \"all_categories\".\"p\" AS category_path FROM \"courses\" INNER JOIN \"all_categories\"'\n        result << ' ON \"all_categories\".\"parent_id\" = \"courses\".\"id\"'\n        expect(subject.with(:all_categories).arel.to_sql).to eql(result)\n      end\n\n      it 'works with string queries' do\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n         
 cte.query 'SELECT * FROM categories WHERE a IS NULL'\n          cte.sub_query 'SELECT * FROM categories, all_categories WHERE all_categories.a = b'\n          cte.join id: :parent_id\n        end\n\n        result = 'WITH RECURSIVE \"all_categories\" AS ('\n        result << 'SELECT * FROM categories WHERE a IS NULL'\n        result << ' UNION '\n        result << ' SELECT * FROM categories, all_categories WHERE all_categories.a = b'\n        result << ') SELECT \"courses\".* FROM \"courses\" INNER JOIN \"all_categories\"'\n        result << ' ON \"all_categories\".\"parent_id\" = \"courses\".\"id\"'\n        expect(subject.with(:all_categories).arel.to_sql).to eql(result)\n      end\n\n      it 'raises an error when query is a string and there is no sub query' do\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n          cte.query 'SELECT * FROM categories WHERE a IS NULL'\n          cte.join id: :parent_id\n        end\n\n        expect{ subject.with(:all_categories).arel.to_sql }.to raise_error(ArgumentError, /generate sub query/)\n      end\n\n      it 'raises an error when sub query has an invalid type' do\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n          cte.query 'SELECT * FROM categories WHERE a IS NULL'\n          cte.sub_query -> { 1 }\n          cte.join id: :parent_id\n        end\n\n        expect{ subject.with(:all_categories).arel.to_sql }.to raise_error(ArgumentError, /query and sub query objects/)\n      end\n\n      it 'raises an error when connect can be resolved automatically' do\n        allow(klass).to receive(:primary_key).and_return(nil)\n        klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|\n          cte.query Category.all\n          cte.join id: :parent_id\n        end\n\n        expect{ subject.with(:all_categories).arel.to_sql }.to raise_error(ArgumentError, /setting up a proper way to connect/)\n      end\n    end\n\n    it 'works with count 
and does not add extra columns' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes content: :comment_content\n      end\n\n      result = 'WITH \"comments\" AS'\n      result << ' (SELECT \"comments\".\"content\" AS comment_content, \"comments\".\"user_id\" FROM \"comments\")'\n      result << ' SELECT COUNT(*) FROM \"users\"'\n      result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n\n      query = get_last_executed_query { subject.with(:comments).count }\n      expect(query).to eql(result)\n    end\n\n    it 'works with sum and does not add extra columns' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes id: :value\n      end\n\n      result = 'WITH \"comments\" AS'\n      result << ' (SELECT \"comments\".\"id\" AS value, \"comments\".\"user_id\" FROM \"comments\")'\n      result << ' SELECT SUM(\"comments\".\"value\") FROM \"users\"'\n      result << ' INNER JOIN \"comments\" ON \"comments\".\"user_id\" = \"users\".\"id\"'\n\n      query = get_last_executed_query { subject.with(:comments).sum(comments: :value) }\n      expect(query).to eql(result)\n    end\n\n    it 'raises an error when using an invalid type of object as query' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query :string, String\n      end\n\n      expect{ subject.with(:comments).arel.to_sql }.to raise_error(ArgumentError, /object types/)\n    end\n\n    it 'raises an error when trying to use a statement that is not defined' do\n      expect{ subject.with(:does_not_exist).arel.to_sql }.to raise_error(ArgumentError)\n    end\n\n    it 'raises an error when using an invalid type of join' do\n      klass.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes content: :comment_content\n        cte.join_type :invalid\n      end\n\n      expect{ 
subject.with(:comments).arel.to_sql }.to raise_error(ArgumentError)\n    end\n  end\n\n  context 'on model' do\n    subject { User }\n\n    it 'has its configurator' do\n      expect(subject.protected_methods).to include(:cte)\n      expect(subject.protected_methods).to include(:auxiliary_statement)\n    end\n\n    it 'has the recursive configuration' do\n      expect(subject.protected_methods).to include(:recursive_cte)\n      expect(subject.protected_methods).to include(:recursive_auxiliary_statement)\n    end\n\n    it 'allows configure new auxiliary statements' do\n      subject.send(:auxiliary_statement, :cte1)\n      expect(subject.auxiliary_statements_list).to include(:cte1)\n      expect(subject.const_defined?('Cte1_AuxiliaryStatement')).to be_truthy\n    end\n\n    it 'has its query method' do\n      expect(subject).to respond_to(:with)\n    end\n\n    it 'returns a relation when using the method' do\n      subject.send(:auxiliary_statement, :comments) do |cte|\n        cte.query Comment.all\n        cte.attributes content: :comment_content\n      end\n      expect(subject.with(:comments)).to be_a(ActiveRecord::Relation)\n    end\n  end\n\n  context 'on external' do\n    let(:klass) { Torque::PostgreSQL::AuxiliaryStatement }\n    subject { User }\n\n    it 'has the external method available' do\n      expect(klass).to respond_to(:create)\n    end\n\n    it 'accepts simple auxiliary statement definition' do\n      sample = klass.create(Comment.all)\n      query = subject.with(sample, select: {content: :comment_content}).arel.to_sql\n\n      result = 'WITH \"comment\" AS'\n      result << ' (SELECT \"comments\".\"content\" AS comment_content, \"comments\".\"user_id\" FROM \"comments\")'\n      result << ' SELECT \"users\".*, \"comment\".\"comment_content\" FROM \"users\"'\n      result << ' INNER JOIN \"comment\" ON \"comment\".\"user_id\" = \"users\".\"id\"'\n      expect(query).to eql(result)\n    end\n\n    it 'accepts a hash auxiliary statement 
definition' do\n      sample = klass.create(query: Comment.all, select: {content: :comment_content})\n      query = subject.with(sample).arel.to_sql\n\n      result = 'WITH \"comment\" AS'\n      result << ' (SELECT \"comments\".\"content\" AS comment_content, \"comments\".\"user_id\" FROM \"comments\")'\n      result << ' SELECT \"users\".*, \"comment\".\"comment_content\" FROM \"users\"'\n      result << ' INNER JOIN \"comment\" ON \"comment\".\"user_id\" = \"users\".\"id\"'\n      expect(query).to eql(result)\n    end\n\n    it 'accepts a block when creating the auxiliary statement' do\n      sample = klass.create(:all_comments) do |cte|\n        cte.query Comment.all\n        cte.select content: :comment_content\n      end\n\n      result = 'WITH \"all_comments\" AS'\n      result << ' (SELECT \"comments\".\"content\" AS comment_content, \"comments\".\"user_id\" FROM \"comments\")'\n      result << ' SELECT \"users\".*, \"all_comments\".\"comment_content\" FROM \"users\"'\n      result << ' INNER JOIN \"all_comments\" ON \"all_comments\".\"user_id\" = \"users\".\"id\"'\n\n      query = subject.with(sample).arel.to_sql\n      expect(query).to eql(result)\n    end\n\n    context 'recursive' do\n      let(:klass) { Torque::PostgreSQL::AuxiliaryStatement::Recursive }\n      subject { Course }\n\n      it 'has the external method available' do\n        expect(klass).to respond_to(:create)\n      end\n\n      it 'accepts simple recursive auxiliary statement definition' do\n        settings = { join: { id: :parent_id } }\n        query = subject.with(klass.create(Category.all), **settings).arel.to_sql\n\n        result = 'WITH RECURSIVE \"category\" AS ('\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" IS NULL'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\"'\n        result << ' 
FROM \"categories\", \"category\"'\n        result << ' WHERE \"categories\".\"parent_id\" = \"category\".\"id\"'\n        result << ' ) SELECT \"courses\".* FROM \"courses\" INNER JOIN \"category\"'\n        result << ' ON \"category\".\"parent_id\" = \"courses\".\"id\"'\n        expect(query).to eql(result)\n      end\n\n      it 'accepts a connect option' do\n        settings = { join: { id: :parent_id }, connect: { a: :b } }\n        query = subject.with(klass.create(Category.all), **settings).arel.to_sql\n\n        result = 'WITH RECURSIVE \"category\" AS ('\n        result << ' SELECT \"categories\".\"a\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"b\" IS NULL'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"a\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\", \"category\"'\n        result << ' WHERE \"categories\".\"b\" = \"category\".\"a\"'\n        result << ' ) SELECT \"courses\".* FROM \"courses\" INNER JOIN \"category\"'\n        result << ' ON \"category\".\"parent_id\" = \"courses\".\"id\"'\n        expect(query).to eql(result)\n      end\n\n      it 'accepts an union all option' do\n        settings = { join: { id: :parent_id }, union_all: true }\n        query = subject.with(klass.create(Category.all), **settings).arel.to_sql\n\n        result = 'WITH RECURSIVE \"category\" AS ('\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" IS NULL'\n        result << ' UNION ALL'\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\"'\n        result << ' FROM \"categories\", \"category\"'\n        result << ' WHERE \"categories\".\"parent_id\" = \"category\".\"id\"'\n        result << ' ) SELECT \"courses\".* FROM \"courses\" INNER JOIN \"category\"'\n        result << ' ON 
\"category\".\"parent_id\" = \"courses\".\"id\"'\n        expect(query).to eql(result)\n      end\n\n      it 'accepts a sub query option' do\n        settings = { join: { id: :parent_id }, sub_query: Category.where(active: true) }\n        query = subject.with(klass.create(Category.all), **settings).arel.to_sql\n\n        result = 'WITH RECURSIVE \"category\" AS ('\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\" FROM \"categories\"'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\" FROM \"categories\", \"category\" WHERE \"categories\".\"active\" = $1'\n        result << ' ) SELECT \"courses\".* FROM \"courses\" INNER JOIN \"category\"'\n        result << ' ON \"category\".\"parent_id\" = \"courses\".\"id\"'\n        expect(query).to eql(result)\n      end\n\n      it 'accepts a depth option' do\n        settings = { join: { id: :parent_id }, with_depth: { name: 'a', start: 5, as: 'b' } }\n        query = subject.with(klass.create(Category.all), **settings).arel.to_sql\n\n        result = 'WITH RECURSIVE \"category\" AS ('\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\", 5 AS a'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" IS NULL'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\", (\"category\".\"a\" + 1) AS a'\n        result << ' FROM \"categories\", \"category\"'\n        result << ' WHERE \"categories\".\"parent_id\" = \"category\".\"id\"'\n        result << ' ) SELECT \"courses\".*, \"category\".\"a\" AS b FROM \"courses\" INNER JOIN \"category\"'\n        result << ' ON \"category\".\"parent_id\" = \"courses\".\"id\"'\n        expect(query).to eql(result)\n      end\n\n      it 'accepts a path option' do\n        settings = { join: { id: :parent_id }, with_path: { name: 'a', source: 'b', as: 'c' } }\n        query = 
subject.with(klass.create(Category.all), **settings).arel.to_sql\n\n        result = 'WITH RECURSIVE \"category\" AS ('\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\", ARRAY[\"categories\".\"b\"]::varchar[] AS a'\n        result << ' FROM \"categories\"'\n        result << ' WHERE \"categories\".\"parent_id\" IS NULL'\n        result << ' UNION'\n        result << ' SELECT \"categories\".\"id\", \"categories\".\"parent_id\", ARRAY_APPEND(\"category\".\"a\", \"categories\".\"b\"::varchar) AS a'\n        result << ' FROM \"categories\", \"category\"'\n        result << ' WHERE \"categories\".\"parent_id\" = \"category\".\"id\"'\n        result << ' ) SELECT \"courses\".*, \"category\".\"a\" AS c FROM \"courses\" INNER JOIN \"category\"'\n        result << ' ON \"category\".\"parent_id\" = \"courses\".\"id\"'\n        expect(query).to eql(result)\n      end\n    end\n  end\n\n  context 'on settings' do\n    let(:base) { User }\n    let(:statement_klass) do\n      base.send(:auxiliary_statement, :statement)\n      base::Statement_AuxiliaryStatement\n    end\n\n    subject do\n      Torque::PostgreSQL::AuxiliaryStatement::Settings.new(base, statement_klass)\n    end\n\n    it 'has access to base' do\n      expect(subject.base).to eql(User)\n      expect(subject.base_table).to be_a(Arel::Table)\n    end\n\n    it 'has access to statement table' do\n      expect(subject.table_name).to eql('statement')\n      expect(subject.table).to be_a(Arel::Table)\n    end\n\n    it 'has access to the query arel table' do\n      subject.query Comment.all\n      expect(subject.query_table).to be_a(Arel::Table)\n    end\n\n    it 'raises an error when trying to access query table before defining the query' do\n      expect{ subject.with(:comments).arel.to_sql }.to raise_error(StandardError)\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/belongs_to_many_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'BelongsToMany' do\n  context 'on model' do\n    let(:model) { Video }\n    let(:key) { :tests }\n    let(:builder) { Torque::PostgreSQL::Associations::Builder::BelongsToMany }\n    let(:reflection) { Torque::PostgreSQL::Reflection::BelongsToManyReflection }\n\n    after { model._reflections = {} }\n\n    it 'has the builder method' do\n      expect(model).to respond_to(:belongs_to_many)\n    end\n\n    it 'triggers the correct builder and relation' do\n      expect(builder).to receive(:build).with(anything, :tests, nil, {}) do |_, name, _, _|\n        ActiveRecord::Reflection.create(:belongs_to_many, name, nil, {}, model)\n      end\n\n      expect(reflection).to receive(:new).with(:tests, nil, {}, model)\n\n      model.belongs_to_many(:tests)\n    end\n\n    it 'allows setting up foreign key and primary_key as symbol' do\n      model.belongs_to_many(:tests, foreign_key: :test_ids, primary_key: :test_id)\n\n      reflection = model._reflections[key]\n      expect(reflection.foreign_key).to be_eql('test_ids')\n      expect(reflection.active_record_primary_key).to be_eql('test_id')\n    end\n  end\n\n  context 'on association' do\n    let(:other) { Tag }\n    let(:key) { :tags }\n    let(:initial) { FactoryBot.create(:tag) }\n\n    before { Video.belongs_to_many(:tags) }\n    subject { Video.create(title: 'A') }\n\n    after do\n      Video.reset_callbacks(:save)\n      Video._reflections = {}\n    end\n\n    it 'has the method' do\n      expect(subject).to respond_to(:tags)\n      expect(subject._reflections).to include(key)\n    end\n\n    it 'has correct foreign key' do\n      item = subject._reflections[key]\n      expect(item.foreign_key).to be_eql('tag_ids')\n    end\n\n    it 'loads associated records' do\n      subject.update(tag_ids: [initial.id])\n      expect(subject.tags.to_sql).to be_eql(<<-SQL.squish)\n        SELECT \"tags\".* FROM \"tags\" WHERE \"tags\".\"id\" = #{initial.id}\n      SQL\n\n     
 expect(subject.tags.load).to be_a(ActiveRecord::Associations::CollectionProxy)\n      expect(subject.tags.to_a).to be_eql([initial])\n    end\n\n    it 'can be marked as loaded' do\n      expect(subject.tags.loaded?).to be_eql(false)\n      expect(subject.tags).to respond_to(:load_target)\n      expect(subject.tags.load_target).to be_eql([])\n      expect(subject.tags.loaded?).to be_eql(true)\n    end\n\n    it 'can find specific records' do\n      records = FactoryBot.create_list(:tag, 10)\n      subject.update(tag_ids: records.map(&:id))\n      ids = records.map(&:id).sample(5)\n\n      expect(subject.tags).to respond_to(:find)\n      records = subject.tags.find(*ids)\n\n      expect(records.size).to be_eql(5)\n      expect(records.map(&:id).sort).to be_eql(ids.sort)\n    end\n\n    it 'can return last n records' do\n      records = FactoryBot.create_list(:tag, 10)\n      subject.update(tag_ids: records.map(&:id))\n      ids = records.map(&:id).last(5)\n\n      expect(subject.tags).to respond_to(:last)\n      records = subject.tags.last(5)\n\n      expect(records.size).to be_eql(5)\n      expect(records.map(&:id).sort).to be_eql(ids.sort)\n    end\n\n    it 'can return first n records' do\n      records = FactoryBot.create_list(:tag, 10)\n      subject.update(tag_ids: records.map(&:id))\n      ids = records.map(&:id).first(5)\n\n      expect(subject.tags).to respond_to(:take)\n      records = subject.tags.take(5)\n\n      expect(records.size).to be_eql(5)\n      expect(records.map(&:id).sort).to be_eql(ids.sort)\n    end\n\n    it 'can create the owner record with direct set items' do\n      # Having another association would break this test due to how\n      # +@new_record_before_save+ is set on autosave association\n      Video.has_many(:comments)\n\n      record = Video.create(title: 'A', tags: [initial])\n      record.reload\n\n      expect(record.tags.size).to be_eql(1)\n      expect(record.tags.first.id).to be_eql(initial.id)\n    end\n\n    it 'can keep 
record changes accordingly' do\n      expect(subject.tags.count).to be_eql(0)\n\n      local_previous_changes = nil\n      local_saved_changes = nil\n\n      Video.after_commit do\n        local_previous_changes = self.previous_changes.dup\n        local_saved_changes = self.saved_changes.dup\n      end\n\n      subject.update(title: 'B')\n\n      expect(local_previous_changes).to include('title')\n      expect(local_saved_changes).to include('title')\n\n      subject.tags = FactoryBot.create_list(:tag, 5)\n      subject.update(title: 'C', url: 'X')\n      subject.reload\n\n      expect(local_previous_changes).to include('title', 'url')\n      expect(local_saved_changes).to include('title', 'url')\n      expect(local_previous_changes).not_to include('tag_ids')\n      expect(local_saved_changes).not_to include('tag_ids')\n      expect(subject.tag_ids.size).to be_eql(5)\n      expect(subject.tags.count).to be_eql(5)\n    end\n\n    it 'can assign the record ids during before callback' do\n      Video.before_save { self.tags = FactoryBot.create_list(:tag, 5) }\n\n      record = Video.create(title: 'A')\n\n      expect(Tag.count).to be_eql(5)\n      expect(record.tag_ids.size).to be_eql(5)\n      expect(record.tags.count).to be_eql(5)\n    end\n\n    it 'does not trigger after commit on the associated record' do\n      called = false\n\n      tag = FactoryBot.create(:tag)\n      Tag.after_commit { called = true }\n\n      expect(called).to be_falsey\n\n      subject.tags << tag\n\n      expect(subject.tag_ids).to be_eql([tag.id])\n      expect(called).to be_falsey\n\n      Tag.reset_callbacks(:commit)\n    end\n\n    it 'can build an associated record' do\n      record = subject.tags.build(name: 'Test')\n      expect(record).to be_a(other)\n      expect(record).not_to be_persisted\n      expect(record.name).to be_eql('Test')\n      expect(subject.tags.target).to be_eql([record])\n\n      expect(subject.save && subject.reload).to be_truthy\n      
expect(subject.tag_ids).to be_eql([record.id])\n      expect(subject.tags.size).to be_eql(1)\n    end\n\n    it 'can create an associated record' do\n      record = subject.tags.create(name: 'Test')\n      expect(subject.tags).to respond_to(:create!)\n\n      expect(record).to be_a(other)\n      expect(record).to be_persisted\n      expect(record.name).to be_eql('Test')\n      expect(subject.tag_ids).to be_eql([record.id])\n    end\n\n    it 'can concat records' do\n      record = FactoryBot.create(:tag)\n      subject.update(tag_ids: [record.id])\n      expect(subject.tags.size).to be_eql(1)\n\n      subject.tags.concat(other.new(name: 'Test'))\n      subject.reload\n\n      expect(subject.tags.size).to be_eql(2)\n      expect(subject.tag_ids.size).to be_eql(2)\n      expect(subject.tags.last.name).to be_eql('Test')\n    end\n\n    it 'can replace records' do\n      subject.tags << FactoryBot.create(:tag)\n      expect(subject.tags.size).to be_eql(1)\n\n      subject.tags = [other.new(name: 'Test 1')]\n      subject.reload\n\n      expect(subject.tags.size).to be_eql(1)\n      expect(subject.tags[0].name).to be_eql('Test 1')\n\n      subject.tags.replace([other.new(name: 'Test 2'), other.new(name: 'Test 3')])\n      subject.reload\n\n      expect(subject.tags.size).to be_eql(2)\n      expect(subject.tags[0].name).to be_eql('Test 2')\n      expect(subject.tags[1].name).to be_eql('Test 3')\n    end\n\n    it 'can delete specific records' do\n      subject.tags << initial\n      expect(subject.tags.size).to be_eql(1)\n\n      subject.tags.delete(initial)\n      expect(subject.tags.size).to be_eql(0)\n      expect(subject.reload.tags.size).to be_eql(0)\n    end\n\n    it 'can delete all records' do\n      subject.tags.concat(FactoryBot.create_list(:tag, 5))\n      expect(subject.tags.size).to be_eql(5)\n\n      subject.tags.delete_all\n      expect(subject.tags.size).to be_eql(0)\n    end\n\n    it 'can destroy all records' do\n      
subject.tags.concat(FactoryBot.create_list(:tag, 5))\n      expect(subject.tags.size).to be_eql(5)\n\n      subject.tags.destroy_all\n      expect(subject.tags.size).to be_eql(0)\n    end\n\n    it 'can clear the array' do\n      record = Video.create(title: 'B', tags: [initial])\n      expect(record.tags.size).to be_eql(1)\n\n      record.update(tag_ids: [])\n      record.reload\n\n      expect(record.tag_ids).to be_nil\n      expect(record.tags.size).to be_eql(0)\n    end\n\n    it 'can have sum operations' do\n      records = FactoryBot.create_list(:tag, 5)\n      subject.tags.concat(records)\n\n      result = records.map(&:id).reduce(:+)\n      expect(subject.tags).to respond_to(:sum)\n      expect(subject.tags.sum(:id)).to be_eql(result)\n    end\n\n    it 'can have a pluck operation' do\n      records = FactoryBot.create_list(:tag, 5)\n      subject.tags.concat(records)\n\n      result = records.map(&:name).sort\n      expect(subject.tags).to respond_to(:pluck)\n      expect(subject.tags.pluck(:name).sort).to be_eql(result)\n    end\n\n    it 'can be markes as empty' do\n      expect(subject.tags).to respond_to(:empty?)\n      expect(subject.tags.empty?).to be_truthy\n\n      subject.tags << FactoryBot.create(:tag)\n      expect(subject.tags.empty?).to be_falsey\n    end\n\n    it 'can check if a record is included on the list' do\n      outside = FactoryBot.create(:tag)\n      inside = FactoryBot.create(:tag)\n\n      expect(subject.tags).not_to be_include(inside)\n      expect(subject.tags).not_to be_include(outside)\n\n      subject.tags << inside\n\n      expect(subject.tags).to respond_to(:include?)\n      expect(subject.tags).to be_include(inside)\n      expect(subject.tags).not_to be_include(outside)\n    end\n\n    it 'can append records' do\n      subject.tags << other.new(name: 'Test 1')\n      expect(subject.tags.size).to be_eql(1)\n\n      subject.tags << other.new(name: 'Test 2')\n      subject.update(title: 'B')\n      subject.reload\n\n      
expect(subject.tags.size).to be_eql(2)\n      expect(subject.tags.last.name).to be_eql('Test 2')\n    end\n\n    it 'can clear records' do\n      subject.tags << FactoryBot.create(:tag)\n      expect(subject.tags.size).to be_eql(1)\n\n      subject.tags.clear\n      expect(subject.tags.size).to be_eql(0)\n    end\n\n    it 'can reload records' do\n      expect(subject.tags.size).to be_eql(0)\n      new_tag = FactoryBot.create(:tag)\n      subject.tags << new_tag\n\n      subject.tags.reload\n      expect(subject.tags.size).to be_eql(1)\n      expect(subject.tags.first.id).to be_eql(new_tag.id)\n\n      record = Video.create(title: 'B', tags: [new_tag])\n      record.reload\n\n      expect(record.tags.size).to be_eql(1)\n      expect(record.tags.first.id).to be_eql(new_tag.id)\n    end\n\n    it 'can preload records' do\n      records = FactoryBot.create_list(:tag, 5)\n      subject.tags.concat(records)\n\n      entries = Video.all.includes(:tags).load\n\n      expect(entries.size).to be_eql(1)\n      expect(entries.first.tags).to be_loaded\n      expect(entries.first.tags.size).to be_eql(5)\n    end\n\n    it 'can preload records using ActiveRecord::Associations::Preloader' do\n      records = FactoryBot.create_list(:tag, 5)\n      subject.tags.concat(records)\n\n      entries = Video.all\n      arguments = { records: entries, associations: :tags, available_records: Tag.all.to_a }\n      ActiveRecord::Associations::Preloader.new(**arguments).call\n      entries = entries.load\n\n      expect(entries.size).to be_eql(1)\n      expect(entries.first.tags).to be_loaded\n      expect(entries.first.tags.size).to be_eql(5)\n    end\n\n    it 'can joins records' do\n      query = Video.all.joins(:tags)\n      expect(query.to_sql).to match(/INNER JOIN \"tags\"/)\n      expect { query.load }.not_to raise_error\n    end\n\n    context 'when handling binds' do\n      let(:tag_ids) { FactoryBot.create_list(:tag, 5).map(&:id) }\n      let!(:record) { Video.new(tag_ids: tag_ids) 
}\n\n      it 'uses rails default with in and several binds' do\n        sql, binds = get_query_with_binds { record.tags.load }\n\n        expect(sql).to include(' WHERE \"tags\".\"id\" IN ($1, $2, $3, $4, $5)')\n        expect(binds.size).to be_eql(5)\n      end\n    end\n\n    context 'when the attribute has a default value' do\n      subject { FactoryBot.create(:item) }\n\n      it 'will always return the column default value' do\n        expect(subject.tag_ids).to be_a(Array)\n        expect(subject.tag_ids).to be_eql([1])\n      end\n\n      it 'will keep the value as an array even when the association is cleared' do\n        records = FactoryBot.create_list(:tag, 5)\n        subject.tags.concat(records)\n\n        subject.reload\n        expect(subject.tag_ids).to be_a(Array)\n        expect(subject.tag_ids).not_to be_eql([1, *records.map(&:id)])\n\n        subject.tags.clear\n        subject.reload\n        expect(subject.tag_ids).to be_a(Array)\n        expect(subject.tag_ids).to be_eql([1])\n      end\n    end\n\n    context 'when record is not persisted' do\n      let(:initial) { FactoryBot.create(:tag) }\n\n      subject { Video.new(title: 'A', tags: [initial]) }\n\n      it 'loads associated records' do\n        expect(subject.tags.load).to be_a(ActiveRecord::Associations::CollectionProxy)\n        expect(subject.tags.to_a).to be_eql([initial])\n      end\n    end\n  end\n\n  context 'using uuid' do\n    let(:connection) { ActiveRecord::Base.connection }\n    let(:game) { Class.new(ActiveRecord::Base) }\n    let(:player) { Class.new(ActiveRecord::Base) }\n    let(:other) { player.create }\n\n    # TODO: Set as a shared example\n    before do\n      connection.create_table(:players, id: :uuid) { |t| t.string :name }\n      connection.create_table(:games, id: :uuid) { |t| t.uuid :player_ids, array: true }\n\n      game.table_name = 'games'\n      player.table_name = 'players'\n      game.belongs_to_many :players, anonymous_class: player,\n        
inverse_of: false, foreign_key: :player_ids\n    end\n\n    subject { game.create }\n\n    it 'loads one associated records' do\n      subject.update(player_ids: [other.id])\n      expect(subject.players.to_sql).to be_eql(<<-SQL.squish)\n        SELECT \"players\".* FROM \"players\" WHERE \"players\".\"id\" = '#{other.id}'\n      SQL\n\n      expect(subject.players.load).to be_a(ActiveRecord::Associations::CollectionProxy)\n      expect(subject.players.to_a).to be_eql([other])\n    end\n\n    it 'loads several associated records' do\n      entries = [other, player.create]\n      subject.update(player_ids: entries.map(&:id))\n      expect(subject.players.to_sql).to be_eql(<<-SQL.squish)\n        SELECT \"players\".* FROM \"players\"\n        WHERE \"players\".\"id\" IN ('#{entries[0].id}', '#{entries[1].id}')\n      SQL\n\n      expect(subject.players.load).to be_a(ActiveRecord::Associations::CollectionProxy)\n      expect(subject.players.to_a).to be_eql(entries)\n    end\n\n    it 'can preload records' do\n      records = 5.times.map { player.create }\n      subject.players.concat(records)\n\n      entries = game.all.includes(:players).load\n\n      expect(entries.size).to be_eql(1)\n      expect(entries.first.players).to be_loaded\n      expect(entries.first.players.size).to be_eql(5)\n    end\n\n    it 'can joins records' do\n      query = game.all.joins(:players)\n      expect(query.to_sql).to match(/INNER JOIN \"players\"/)\n      expect { query.load }.not_to raise_error\n    end\n  end\n\n  context 'using callbacks' do\n    let(:tags) { FactoryBot.create_list(:tag, 3) }\n    let(:collectors) { Hash.new { |h, k| h[k] = [] } }\n\n    subject { Video.create(title: 'A') }\n\n    after do\n      Video.reset_callbacks(:save)\n      Video._reflections = {}\n    end\n\n    before do\n      subject.update_attribute(:tag_ids, tags.first(2).pluck(:id))\n      Video.belongs_to_many(:tags,\n        before_add:    ->(_, tag) { collectors[:before_add]    << tag },\n        
after_add:     ->(_, tag) { collectors[:after_add]     << tag },\n        before_remove: ->(_, tag) { collectors[:before_remove] << tag },\n        after_remove:  ->(_, tag) { collectors[:after_remove]  << tag },\n      )\n    end\n\n    it 'works with id changes' do\n      subject.tag_ids = tags.drop(1).pluck(:id)\n      subject.save!\n\n      expect(collectors[:before_add]).to be_eql([tags.last])\n      expect(collectors[:after_add]).to be_eql([tags.last])\n\n      expect(collectors[:before_remove]).to be_eql([tags.first])\n      expect(collectors[:after_remove]).to be_eql([tags.first])\n    end\n\n    it 'works with record changes' do\n      subject.tags = tags.drop(1)\n\n      expect(collectors[:before_add]).to be_eql([tags.last])\n      expect(collectors[:after_add]).to be_eql([tags.last])\n\n      expect(collectors[:before_remove]).to be_eql([tags.first])\n      expect(collectors[:after_remove]).to be_eql([tags.first])\n    end\n  end\n\n  context 'using custom keys' do\n    let(:connection) { ActiveRecord::Base.connection }\n    let(:post) { Post }\n    let(:tag) { Tag }\n    let(:tags) { %w[a b c].map { |id| create(:tag, friendly_id: id) } }\n\n    subject { create(:post) }\n\n    before do\n      connection.add_column(:tags, :friendly_id, :string)\n      connection.add_column(:posts, :friendly_tag_ids, :string, array: true)\n      post.belongs_to_many(:tags, foreign_key: :friendly_tag_ids, primary_key: :friendly_id)\n      post.reset_column_information\n      tag.reset_column_information\n    end\n\n    after do\n      tag.reset_column_information\n      post.reset_column_information\n      post._reflections.delete(:tags)\n    end\n\n    it 'loads associated records' do\n      subject.update(friendly_tag_ids: tags.pluck(:friendly_id))\n\n      expect(subject.tags.to_sql).to be_eql(<<-SQL.squish)\n        SELECT \"tags\".* FROM \"tags\" WHERE \"tags\".\"friendly_id\" IN ('a', 'b', 'c')\n      SQL\n\n      expect(subject.tags.load).to 
be_a(ActiveRecord::Associations::CollectionProxy)\n      expect(subject.tags.to_a).to be_eql(tags)\n    end\n\n    it 'can properly assign tags' do\n      expect(subject.friendly_tag_ids).to be_blank\n\n      subject.tags = tags\n      expect(subject.friendly_tag_ids).to be_eql(%w[a b c])\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/collector_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'Data collector', type: :helper do\n  let(:methods_list) { [:foo, :bar] }\n  subject { Torque::PostgreSQL::Collector.new(*methods_list) }\n\n  it 'is a class creator' do\n    expect(subject).to be_a(Class)\n  end\n\n  it 'has the requested methods' do\n    instance = subject.new\n    methods_list.each do |name|\n      expect(instance).to respond_to(name)\n      expect(instance).to respond_to(\"#{name}=\")\n    end\n  end\n\n  it 'instace values starts as nil' do\n    instance = subject.new\n    methods_list.each do |name|\n      expect(instance.send(name)).to be_nil\n    end\n  end\n\n  it 'set values on the same method' do\n    instance = subject.new\n    methods_list.each do |name|\n      expect(instance.send(name, name)).to eql(name)\n    end\n  end\n\n  it 'get value on the same method' do\n    instance = subject.new\n    methods_list.each do |name|\n      instance.send(name, name)\n      expect(instance.send(name)).to eql(name)\n    end\n  end\n\n  it 'accepts any kind of value' do\n    instance = subject.new\n\n    instance.foo 123\n    expect(instance.foo).to eql(123)\n\n    instance.foo 'chars'\n    expect(instance.foo).to eql('chars')\n\n    instance.foo :test, :test\n    expect(instance.foo).to eql([:test, :test])\n\n    instance.foo test: :test\n    expect(instance.foo).to eql({test: :test})\n\n    instance.foo nil\n    expect(instance.foo).to be_nil\n  end\nend\n"
  },
  {
    "path": "spec/tests/distinct_on_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'DistinctOn' do\n\n  context 'on relation' do\n    subject { Post.unscoped }\n\n    it 'has its method' do\n      expect(subject).to respond_to(:distinct_on)\n    end\n\n    it 'does not mess with original distinct form without select' do\n      expect(subject.distinct.to_sql).to \\\n        eql('SELECT DISTINCT \"posts\".* FROM \"posts\"')\n    end\n\n    it 'does not mess with original distinct form with select' do\n      expect(subject.select(:name).distinct.to_sql).to \\\n        eql('SELECT DISTINCT \"name\" FROM \"posts\"')\n    end\n\n    it 'is able to do the basic form' do\n      expect(subject.distinct_on(:title).to_sql).to \\\n        eql('SELECT DISTINCT ON ( \"posts\".\"title\" ) \"posts\".* FROM \"posts\"')\n    end\n\n    it 'is able to do with multiple attributes' do\n      expect(subject.distinct_on(:title, :content).to_sql).to \\\n        eql('SELECT DISTINCT ON ( \"posts\".\"title\", \"posts\".\"content\" ) \"posts\".* FROM \"posts\"')\n    end\n\n    it 'is able to do with relation' do\n      expect(subject.distinct_on(author: :name).to_sql).to \\\n        eql('SELECT DISTINCT ON ( \"authors\".\"name\" ) \"posts\".* FROM \"posts\"')\n    end\n\n    it 'is able to do with relation and multiple attributes' do\n      expect(subject.distinct_on(author: [:name, :age]).to_sql).to \\\n        eql('SELECT DISTINCT ON ( \"authors\".\"name\", \"authors\".\"age\" ) \"posts\".* FROM \"posts\"')\n    end\n\n    it 'raises with invalid relation' do\n      expect { subject.distinct_on(supervisors: :name).to_sql }.to \\\n        raise_error(ArgumentError, /Relation for/)\n    end\n\n    it 'raises with third level hash' do\n      expect { subject.distinct_on(author: [comments: :body]).to_sql }.to \\\n        raise_error(ArgumentError, /on third level/)\n    end\n  end\n\n  context 'on model' do\n    subject { Post }\n\n    it 'has its method' do\n      expect(subject).to respond_to(:distinct_on)\n    
end\n\n    it 'returns a relation when using the method' do\n      expect(subject.distinct_on(:title)).to be_a(ActiveRecord::Relation)\n    end\n  end\n\nend\n"
  },
  {
    "path": "spec/tests/enum_set_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'Enum' do\n  let(:connection) { ActiveRecord::Base.connection }\n  let(:attribute_klass) { Torque::PostgreSQL::Attributes::EnumSet }\n  let(:table_definition) { ActiveRecord::ConnectionAdapters::PostgreSQL::TableDefinition }\n\n  def decorate(model, field, options = {})\n    attribute_klass.include_on(model, :enum_set)\n    model.enum_set(field, **options)\n  end\n\n  before :each do\n    Torque::PostgreSQL.config.enum.set_method = :pg_set_enum\n    Torque::PostgreSQL::Attributes::EnumSet.include_on(ActiveRecord::Base)\n\n    # Define a method to find yet to define constants\n    Torque::PostgreSQL.config.enum.namespace.define_singleton_method(:const_missing) do |name|\n      Torque::PostgreSQL::Attributes::EnumSet.lookup(name)\n    end\n\n    # Define a helper method to get a sample value\n    Torque::PostgreSQL.config.enum.namespace.define_singleton_method(:sample) do |name|\n      Torque::PostgreSQL::Attributes::EnumSet.lookup(name).sample\n    end\n  end\n\n  context 'on table definition' do\n    subject { table_definition.new(connection, 'articles') }\n\n    it 'can be defined as an array' do\n      subject.enum(:content_status, array: true, enum_type: :content_status)\n      expect(subject['content_status'].name).to be_eql('content_status')\n      expect(subject['content_status'].type).to be_eql(:enum)\n      expect(subject['content_status'].options[:enum_type]).to be_eql(:content_status)\n\n      array = subject['content_status'].respond_to?(:options) \\\n        ? 
subject['content_status'].options[:array] \\\n        : subject['content_status'].array\n\n      expect(array).to be_eql(true)\n    end\n  end\n\n  context 'on schema' do\n    let(:source) { ActiveRecord::Base.connection_pool }\n\n    let(:dump_result) do\n      ActiveRecord::SchemaDumper.dump(source, (dump_result = StringIO.new))\n      dump_result.string\n    end\n\n    it 'can be used on tables' do\n      checker = /t\\.enum +\"conflicts\", +array: true, +enum_type: \"conflicts\"/\n      expect(dump_result).to match checker\n    end\n\n    xit 'can have a default value as an array of symbols' do\n      checker = /t\\.enum +\"types\", +default: \\[:A, :B\\], +array: true, +enum_type: \"types\"/\n      expect(dump_result).to match checker\n    end\n  end\n\n  context 'on value' do\n    subject { Enum::TypesSet }\n    let(:values) { %w(A B C D) }\n    let(:error) { Torque::PostgreSQL::Attributes::EnumSet::EnumSetError }\n    let(:mock_enum) do\n      enum_klass = Class.new(subject::EnumSource.superclass)\n      enum_klass.instance_variable_set(:@values, values << '15')\n\n      klass = Class.new(subject.superclass)\n      klass.const_set('EnumSource', enum_klass)\n      klass\n    end\n\n    it 'class exists' do\n      namespace = Torque::PostgreSQL.config.enum.namespace\n      expect(namespace.const_defined?('TypesSet')).to be_truthy\n      expect(subject.const_defined?('EnumSource')).to be_truthy\n      expect(subject < Torque::PostgreSQL::Attributes::EnumSet).to be_truthy\n    end\n\n    it 'returns the db type name' do\n      expect(subject.type_name).to be_eql('types[]')\n    end\n\n    it 'values match database values' do\n      expect(subject.values).to be_eql(values)\n    end\n\n    it 'values can be reach using fetch, as in hash enums' do\n      expect(subject).to respond_to(:fetch)\n\n      value = subject.fetch('A', 'A')\n      expect(value).to be_a(subject)\n      expect(value).to be_eql(subject.A)\n\n      value = subject.fetch('other', 'other')\n      
expect(value).to be_nil\n    end\n\n    it 'values can be reach using [], as in hash enums' do\n      expect(subject).to respond_to(:[])\n\n      value = subject['A']\n      expect(value).to be_a(subject)\n      expect(value).to be_eql(subject.A)\n\n      value = subject['other']\n      expect(value).to be_nil\n    end\n\n    it 'accepts respond_to against value' do\n      expect(subject).to respond_to(:A)\n    end\n\n    it 'allows fast creation of values' do\n      value = subject.A\n      expect(value).to be_a(subject)\n    end\n\n    it 'keeps blank values as Lazy' do\n      expect(subject.new(nil)).to be_nil\n      expect(subject.new([])).to be_blank\n    end\n\n    it 'can start from nil value using lazy' do\n      lazy  = Torque::PostgreSQL::Attributes::Lazy\n      value = subject.new(nil)\n\n      expect(value.__class__).to be_eql(lazy)\n      expect(value.to_s).to be_eql('')\n      expect(value.to_i).to be_nil\n\n      expect(value.A?).to be_falsey\n    end\n\n    it 'accepts values to come from numeric as power' do\n      expect(subject.new(0)).to be_blank\n      expect(subject.new(1)).to be_eql(subject.A)\n      expect(subject.new(3)).to be_eql(subject.A | subject.B)\n      expect { subject.new(16) }.to raise_error(error, /out of bounds/)\n    end\n\n    it 'accepts values to come from numeric list' do\n      expect(subject.new([0])).to be_eql(subject.A)\n      expect(subject.new([0, 1])).to be_eql(subject.A | subject.B)\n      expect { subject.new([4]) }.to raise_error(error.superclass, /out of bounds/)\n    end\n\n    it 'accepts string initialization' do\n      expect(subject.new('A')).to be_eql(subject.A)\n      expect { subject.new('E') }.to raise_error(error.superclass, /not valid for/)\n    end\n\n    it 'allows values bitwise operations' do\n      expect((subject.A | subject.B).to_i).to be_eql(3)\n      expect((subject.A & subject.B).to_i).to be_nil\n      expect(((subject.A | subject.B) & subject.B).to_i).to be_eql(2)\n    end\n\n    it 'allows 
values comparison' do\n      value = subject.B | subject.C\n      expect(value).to be > subject.A\n      expect(value).to be < subject.D\n      expect(value).to be_eql(6)\n      expect(value).to_not be_eql(1)\n      expect(subject.A == mock_enum.A).to be_falsey\n    end\n\n    it 'accepts value checking' do\n      value = subject.B | subject.C\n      expect(value).to respond_to(:B?)\n      expect(value.B?).to be_truthy\n      expect(value.C?).to be_truthy\n      expect(value.A?).to be_falsey\n      expect(value.D?).to be_falsey\n    end\n\n    it 'accepts replace and bang value' do\n      value = subject.B | subject.C\n      expect(value).to respond_to(:B!)\n      expect(value.A!).to be_eql(7)\n      expect(value.replace(:D)).to be_eql(subject.D)\n    end\n\n    it 'accepts values turn into integer by its power' do\n      expect(subject.B.to_i).to be_eql(2)\n      expect(subject.C.to_i).to be_eql(4)\n    end\n\n    it 'accepts values turn into an array of integer by index' do\n      expect((subject.B | subject.C).map(&:to_i)).to be_eql([1, 2])\n    end\n\n    it 'can return a sample for resting purposes' do\n      expect(subject).to receive(:new).with(Numeric)\n      subject.sample\n    end\n  end\n\n  context 'on OID' do\n    let(:enum) { Enum::TypesSet }\n    let(:enum_source) { enum::EnumSource }\n    subject { Torque::PostgreSQL::Adapter::OID::EnumSet.new('types', enum_source) }\n\n    context 'on deserialize' do\n      it 'returns nil' do\n        expect(subject.deserialize(nil)).to be_nil\n      end\n\n      it 'returns enum' do\n        value = subject.deserialize('{B,C}')\n        expect(value).to be_a(enum)\n        expect(value).to be_eql(enum.B | enum.C)\n      end\n    end\n\n    context 'on serialize' do\n      it 'returns nil' do\n        expect(subject.serialize(nil)).to be_nil\n        expect(subject.serialize(0)).to be_nil\n      end\n\n      it 'returns as string' do\n        expect(subject.serialize(enum.B | enum.C)).to be_eql('{B,C}')\n        
expect(subject.serialize(3)).to be_eql('{A,B}')\n      end\n    end\n\n    context 'on cast' do\n      it 'accepts nil' do\n        expect(subject.cast(nil)).to be_nil\n      end\n\n      it 'accepts invalid values as nil' do\n        expect(subject.cast([])).to be_nil\n      end\n\n      it 'accepts array of strings' do\n        value = subject.cast(['A'])\n        expect(value).to be_a(enum)\n        expect(value).to be_eql(enum.A)\n      end\n\n      it 'accepts array of numbers' do\n        value = subject.cast([1])\n        expect(value).to be_a(enum)\n        expect(value).to be_eql(enum.B)\n      end\n    end\n  end\n\n  context 'on I18n' do\n    subject { Enum::TypesSet }\n\n    it 'has the text method' do\n      expect(subject.new(0)).to respond_to(:text)\n    end\n\n    it 'brings the correct values' do\n      expect(subject.new(0).text).to be_eql('')\n      expect(subject.new(1).text).to be_eql('A')\n      expect(subject.new(2).text).to be_eql('B')\n      expect(subject.new(3).text).to be_eql('A and B')\n      expect(subject.new(7).text).to be_eql('A, B, and C')\n    end\n  end\n\n  context 'on model' do\n    let(:instance) { Course.new }\n\n    before(:each) { decorate(Course, :types) }\n\n    subject { Course }\n\n    it 'has all enum set methods' do\n      expect(subject).to  respond_to(:types)\n      expect(subject).to  respond_to(:types_keys)\n      expect(subject).to  respond_to(:types_texts)\n      expect(subject).to  respond_to(:types_options)\n\n      expect(subject).to  respond_to(:has_types)\n      expect(subject).to  respond_to(:has_any_types)\n\n      expect(instance).to respond_to(:types_text)\n\n      subject.types.each do |value|\n        value = value.underscore\n        expect(subject).to  respond_to(value)\n        expect(instance).to respond_to(value + '?')\n        expect(instance).to respond_to(value + '!')\n      end\n    end\n\n    it 'scope the model correctly' do\n      query = subject.a.to_sql\n      expect(query).to 
include(%{WHERE \"courses\".\"types\" @> '{A}'::types[]})\n    end\n\n    it 'has a match all scope' do\n      query = subject.has_types('B', 'A').to_sql\n      expect(query).to include(%{WHERE \"courses\".\"types\" @> '{B,A}'::types[]})\n    end\n\n    it 'has a match any scope' do\n      query = subject.has_any_types('B', 'A').to_sql\n      expect(query).to include(%{WHERE \"courses\".\"types\" && '{B,A}'::types[]})\n    end\n\n    it 'uses bind param instead of raw value' do\n      sql, binds = get_query_with_binds { subject.has_any_types('B', 'A').load }\n      expect(sql).to include('WHERE \"courses\".\"types\" && $1::types[]')\n      expect(binds.first.value).to eq(%w[B A])\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/enum_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'Enum' do\n  let(:connection) { ActiveRecord::Base.connection }\n  let(:attribute_klass) { Torque::PostgreSQL::Attributes::Enum }\n  let(:table_definition) { ActiveRecord::ConnectionAdapters::PostgreSQL::TableDefinition }\n\n  def decorate(model, field, options = {})\n    attribute_klass.include_on(model, :pg_enum)\n    model.pg_enum(field, **options)\n  end\n\n  before :each do\n    Torque::PostgreSQL.config.enum.base_method = :pg_enum\n    Torque::PostgreSQL::Attributes::Enum.include_on(ActiveRecord::Base)\n\n    # Define a method to find yet to define constants\n    Torque::PostgreSQL.config.enum.namespace.define_singleton_method(:const_missing) do |name|\n      Torque::PostgreSQL::Attributes::Enum.lookup(name)\n    end\n\n    # Define a helper method to get a sample value\n    Torque::PostgreSQL.config.enum.namespace.define_singleton_method(:sample) do |name|\n      Torque::PostgreSQL::Attributes::Enum.lookup(name).sample\n    end\n  end\n\n  context 'on migration' do\n    it 'can be deleted' do\n      connection.create_enum(:status, %i(foo bar))\n      expect(connection.type_exists?(:status)).to be_truthy\n\n      connection.drop_type(:status)\n      expect(connection.type_exists?(:status)).to be_falsey\n    end\n\n    it 'can be renamed' do\n      connection.rename_type(:content_status, :status)\n      expect(connection.type_exists?(:content_status)).to be_falsey\n      expect(connection.type_exists?(:status)).to be_truthy\n    end\n\n    it 'inserts values at the end' do\n      connection.create_enum(:status, %i(foo bar))\n      connection.add_enum_values(:status, %i(baz qux))\n      expect(connection.enum_values(:status)).to be_eql(['foo', 'bar', 'baz', 'qux'])\n    end\n\n    it 'inserts values in the beginning' do\n      connection.create_enum(:status, %i(foo bar))\n      connection.add_enum_values(:status, %i(baz qux), prepend: true)\n      expect(connection.enum_values(:status)).to be_eql(['baz', 
'qux', 'foo', 'bar'])\n    end\n\n    it 'inserts values in the middle' do\n      connection.create_enum(:status, %i(foo bar))\n      connection.add_enum_values(:status, %i(baz), after: 'foo')\n      expect(connection.enum_values(:status)).to be_eql(['foo', 'baz', 'bar'])\n\n      connection.add_enum_values(:status, %i(qux), before: 'bar')\n      expect(connection.enum_values(:status)).to be_eql(['foo', 'baz', 'qux', 'bar'])\n    end\n\n    it 'inserts values with prefix or suffix' do\n      connection.create_enum(:status, %i(foo bar))\n      connection.add_enum_values(:status, %i(baz), prefix: true)\n      connection.add_enum_values(:status, %i(qux), suffix: 'tst')\n      expect(connection.enum_values(:status)).to be_eql(['foo', 'bar', 'status_baz', 'qux_tst'])\n    end\n  end\n\n  context 'on value' do\n    let(:values) { %w(created draft published archived) }\n    let(:error) { Torque::PostgreSQL::Attributes::Enum::EnumError }\n    let(:mock_enum) do\n      klass = Class.new(subject.superclass)\n      klass.instance_variable_set(:@values, values << '15')\n      klass\n    end\n\n    subject { Enum::ContentStatus }\n\n    it 'class exists' do\n      namespace = Torque::PostgreSQL.config.enum.namespace\n      expect(namespace.const_defined?('ContentStatus')).to be_truthy\n      expect(subject < Torque::PostgreSQL::Attributes::Enum).to be_truthy\n    end\n\n    it 'lazy loads values' do\n      expect(subject.instance_variable_defined?(:@values)).to be_falsey\n    end\n\n    it 'returns the db type name' do\n      expect(subject.type_name).to be_eql('content_status')\n    end\n\n    it 'values match database values' do\n      expect(subject.values).to be_eql(values)\n    end\n\n    it 'can return a sample value' do\n      expect(Enum).to respond_to(:sample)\n      expect(Enum::ContentStatus).to respond_to(:sample)\n      expect(Enum::ContentStatus.sample).to satisfy { |v| values.include?(v) }\n      expect(Enum.sample(:content_status)).to satisfy { |v| 
values.include?(v) }\n    end\n\n    it 'values can be iterated by using each direct on class' do\n      expect(subject).to respond_to(:each)\n      expect(subject.each).to be_a(Enumerator)\n      expect(subject.each.entries).to be_eql(values)\n    end\n\n    it 'values can be reach using fetch, as in hash enums' do\n      expect(subject).to respond_to(:fetch)\n\n      value = subject.fetch('archived', 'archived')\n      expect(value).to be_a(subject)\n      expect(value).to be_eql(subject.archived)\n\n      value = subject.fetch('other', 'other')\n      expect(value).to be_nil\n    end\n\n    it 'values can be reach using [], as in hash enums' do\n      expect(subject).to respond_to(:[])\n\n      value = subject['archived']\n      expect(value).to be_a(subject)\n      expect(value).to be_eql(subject.archived)\n\n      value = subject['other']\n      expect(value).to be_nil\n    end\n\n    it 'accepts respond_to against value' do\n      expect(subject).to respond_to(:archived)\n    end\n\n    it 'allows fast creation of values' do\n      value = subject.draft\n      expect(value).to be_a(subject)\n    end\n\n    it 'keeps blank values as Lazy' do\n      expect(subject.new(nil)).to be_nil\n      expect(subject.new([])).to be_nil\n      expect(subject.new('')).to be_nil\n    end\n\n    it 'can start from nil value using lazy' do\n      lazy  = Torque::PostgreSQL::Attributes::Lazy\n      value = subject.new(nil)\n\n      expect(value.__class__).to be_eql(lazy)\n      expect(value.to_s).to be_eql('')\n      expect(value.to_i).to be_nil\n\n      expect(value.draft?).to be_falsey\n    end\n\n    it 'accepts values to come from numeric' do\n      expect(subject.new(0)).to be_eql(subject.created)\n      expect { subject.new(5) }.to raise_error(error, /out of bounds/)\n    end\n\n    it 'accepts string initialization' do\n      expect(subject.new('created')).to be_eql(subject.created)\n      expect { subject.new('updated') }.to raise_error(error, /not valid for/)\n    
end\n\n    it 'allows values comparison' do\n      value = subject.draft\n      expect(value).to be > subject.created\n      expect(value).to be < subject.archived\n      expect(value).to be_eql(subject.draft)\n      expect(value).to_not be_eql(subject.published)\n    end\n\n    it 'allows values comparison with string' do\n      value = subject.draft\n      expect(value).to be > :created\n      expect(value).to be < :archived\n      expect(value).to be_eql(:draft)\n      expect(value).to_not be_eql(:published)\n    end\n\n    it 'allows values comparison with symbol' do\n      value = subject.draft\n      expect(value).to be > 'created'\n      expect(value).to be < 'archived'\n      expect(value).to be_eql('draft')\n      expect(value).to_not be_eql('published')\n    end\n\n    it 'allows values comparison with number' do\n      value = subject.draft\n      expect(value).to be > 0\n      expect(value).to be < 3\n      expect(value).to be_eql(1)\n      expect(value).to_not be_eql(2.5)\n    end\n\n    it 'does not allow cross-enum comparison' do\n      expect { subject.draft < mock_enum.published }.to raise_error(error, /^Comparison/)\n      expect { subject.draft > mock_enum.created }.to raise_error(error, /^Comparison/)\n    end\n\n    it 'does not allow other types comparison' do\n      expect { subject.draft > true }.to raise_error(error, /^Comparison/)\n      expect { subject.draft < [] }.to raise_error(error, /^Comparison/)\n    end\n\n    it 'accepts value checking' do\n      value = subject.draft\n      expect(value).to respond_to(:archived?)\n      expect(value.draft?).to be_truthy\n      expect(value.published?).to be_falsey\n    end\n\n    it 'accepts replace and bang value' do\n      value = subject.draft\n      expect(value).to respond_to(:archived!)\n      expect(value.archived!).to be_eql(subject.archived)\n      expect(value.replace('created')).to be_eql(subject.created)\n    end\n\n    it 'accepts values turn into integer by its index' do\n      
mock_value = mock_enum.new('15')\n      expect(subject.created.to_i).to be_eql(0)\n      expect(subject.archived.to_i).to be_eql(3)\n      expect(mock_value.to_i).to_not be_eql(15)\n      expect(mock_value.to_i).to be_eql(4)\n    end\n\n    context 'on members' do\n      it 'has enumerable operations' do\n        expect(subject).to respond_to(:all?)\n        expect(subject).to respond_to(:any?)\n        expect(subject).to respond_to(:collect)\n        expect(subject).to respond_to(:count)\n        expect(subject).to respond_to(:cycle)\n        expect(subject).to respond_to(:detect)\n        expect(subject).to respond_to(:drop)\n        expect(subject).to respond_to(:drop_while)\n        expect(subject).to respond_to(:each)\n        expect(subject).to respond_to(:each_with_index)\n        expect(subject).to respond_to(:entries)\n        expect(subject).to respond_to(:find)\n        expect(subject).to respond_to(:find_all)\n        expect(subject).to respond_to(:find_index)\n        expect(subject).to respond_to(:first)\n        expect(subject).to respond_to(:flat_map)\n        expect(subject).to respond_to(:include?)\n        expect(subject).to respond_to(:inject)\n        expect(subject).to respond_to(:lazy)\n        expect(subject).to respond_to(:map)\n        expect(subject).to respond_to(:member?)\n        expect(subject).to respond_to(:one?)\n        expect(subject).to respond_to(:reduce)\n        expect(subject).to respond_to(:reject)\n        expect(subject).to respond_to(:reverse_each)\n        expect(subject).to respond_to(:select)\n        expect(subject).to respond_to(:sort)\n        expect(subject).to respond_to(:zip)\n      end\n\n      it 'works with map' do\n        result = subject.map(&:to_i)\n        expect(result).to be_eql([0, 1, 2, 3])\n      end\n    end\n  end\n\n  context 'on OID' do\n    let(:enum) { Enum::ContentStatus }\n    subject { Torque::PostgreSQL::Adapter::OID::Enum.new('content_status') }\n\n    context 'on deserialize' do\n      
it 'returns nil' do\n        expect(subject.deserialize(nil)).to be_nil\n      end\n\n      it 'returns enum' do\n        value = subject.deserialize('created')\n        expect(value).to be_a(enum)\n        expect(value).to be_eql(enum.created)\n      end\n    end\n\n    context 'on serialize' do\n      it 'returns nil' do\n        expect(subject.serialize(nil)).to be_nil\n        expect(subject.serialize('test')).to be_nil\n        expect(subject.serialize(15)).to be_nil\n      end\n\n      it 'returns as string' do\n        expect(subject.serialize(enum.created)).to be_eql('created')\n        expect(subject.serialize(1)).to be_eql('draft')\n      end\n    end\n\n    context 'on cast' do\n      it 'accepts nil' do\n        expect(subject.cast(nil)).to be_nil\n      end\n\n      it 'accepts invalid values as nil' do\n        expect(subject.cast(false)).to be_nil\n        expect(subject.cast(true)).to be_nil\n        expect(subject.cast([])).to be_nil\n      end\n\n      it 'accepts string' do\n        value = subject.cast('created')\n        expect(value).to be_a(enum)\n        expect(value).to be_eql(enum.created)\n      end\n\n      it 'accepts numeric' do\n        value = subject.cast(1)\n        expect(value).to be_a(enum)\n        expect(value).to be_eql(enum.draft)\n      end\n    end\n  end\n\n  context 'on I18n' do\n    subject { Enum::ContentStatus }\n\n    it 'has the text method' do\n      expect(subject.new(0)).to respond_to(:text)\n    end\n\n    it 'brings the correct values' do\n      expect(subject.new(0).text).to be_eql('1 - Created')\n      expect(subject.new(1).text).to be_eql('Draft (2)')\n      expect(subject.new(2).text).to be_eql('Finally published')\n      expect(subject.new(3).text).to be_eql('Archived')\n    end\n  end\n\n  context 'on model' do\n    let(:instance) { FactoryBot.build(:user) }\n\n    before(:each) { decorate(User, :role) }\n\n    subject { User }\n\n    it 'has all enum methods' do\n      expect(subject).to  
respond_to(:roles)\n      expect(subject).to  respond_to(:roles_keys)\n      expect(subject).to  respond_to(:roles_texts)\n      expect(subject).to  respond_to(:roles_options)\n      expect(instance).to respond_to(:role_text)\n\n      subject.roles.each do |value|\n        expect(subject).to  respond_to(value)\n        expect(instance).to respond_to(value + '?')\n        expect(instance).to respond_to(value + '!')\n      end\n    end\n\n    it 'plural method brings the list of values' do\n      result = subject.roles\n      expect(result).to be_a(Array)\n      expect(result).to be_eql(Enum::Roles.values)\n    end\n\n    it 'text value now uses model and attribute references' do\n      instance.role = :visitor\n      expect(instance.role_text).to be_eql('A simple Visitor')\n\n      instance.role = :assistant\n      expect(instance.role_text).to be_eql('An Assistant')\n\n      instance.role = :manager\n      expect(instance.role_text).to be_eql('The Manager')\n\n      instance.role = :admin\n      expect(instance.role_text).to be_eql('Super Duper Admin')\n    end\n\n    it 'has scopes correctly applied' do\n      subject.roles.each do |value|\n        expect(subject.send(value).to_sql).to match(/WHERE \"users\".\"role\" = '#{value}'/)\n      end\n    end\n\n    it 'has scopes available on associations' do\n      author = FactoryBot.create(:author)\n      FactoryBot.create(:post, author: author)\n\n      decorate(Post, :status)\n      expect(author.posts).to respond_to(:test_scope)\n\n      Enum::ContentStatus.each do |value|\n        expect(author.posts).to be_a(ActiveRecord::Associations::CollectionProxy)\n        expect(author.posts).to respond_to(value.to_sym)\n        expect(author.posts.send(value).to_sql).to match(/AND \"posts\".\"status\" = '#{value}'/)\n      end\n    end\n\n    it 'ask methods work' do\n      instance.role = :assistant\n      expect(instance.manager?).to be_falsey\n      expect(instance.assistant?).to be_truthy\n    end\n\n    it 'bang 
methods work' do\n      instance.admin!\n      expect(instance.persisted?).to be_truthy\n\n      updated_at = instance.updated_at\n      Torque::PostgreSQL.config.enum.save_on_bang = false\n      instance.visitor!\n      Torque::PostgreSQL.config.enum.save_on_bang = true\n\n      expect(instance.role).to be_eql(:visitor)\n      expect(instance.updated_at).to be_eql(updated_at)\n\n      instance.reload\n      expect(instance.role).to be_eql(:admin)\n    end\n\n    it 'raises when starting an enum with conflicting methods' do\n      Torque::PostgreSQL.config.enum.raise_conflicting = true\n      AText = Class.new(ActiveRecord::Base)\n      AText.table_name = 'texts'\n\n      expect { decorate(AText, :conflict) }.to raise_error(ArgumentError, /already exists in/)\n      Torque::PostgreSQL.config.enum.raise_conflicting = false\n    end\n\n    it 'scope the model correctly' do\n      query = subject.manager.to_sql\n      expect(query).to match(/\"users\".\"role\" = 'manager'/)\n    end\n\n    context 'on inherited classes' do\n      it 'has all enum methods' do\n        klass = Class.new(User)\n        instance = klass.new\n\n        expect(klass).to    respond_to(:roles)\n        expect(klass).to    respond_to(:roles_keys)\n        expect(klass).to    respond_to(:roles_texts)\n        expect(klass).to    respond_to(:roles_options)\n        expect(instance).to respond_to(:role_text)\n\n        klass.roles.each do |value|\n          expect(klass).to    respond_to(value)\n          expect(instance).to respond_to(value + '?')\n          expect(instance).to respond_to(value + '!')\n        end\n      end\n    end\n\n    context 'without autoload' do\n      subject { Author }\n      let(:instance) { FactoryBot.build(:author) }\n\n      it 'has both rails original enum and the new pg_enum' do\n        expect(subject).to respond_to(:enum)\n        expect(subject).to respond_to(:pg_enum)\n        expect(subject.method(:pg_enum).arity).to eql(-1)\n      end\n\n      it 'does not 
create all methods' do\n        AAuthor = Class.new(ActiveRecord::Base)\n        AAuthor.table_name = 'authors'\n\n        expect(AAuthor).to_not respond_to(:specialties)\n        expect(AAuthor).to_not respond_to(:specialties_keys)\n        expect(AAuthor).to_not respond_to(:specialties_texts)\n        expect(AAuthor).to_not respond_to(:specialties_options)\n        expect(AAuthor.instance_methods).to_not include(:specialty_text)\n\n        Enum::Specialties.values.each do |value|\n          expect(AAuthor).to_not respond_to(value)\n          expect(AAuthor.instance_methods).to_not include(value + '?')\n          expect(AAuthor.instance_methods).to_not include(value + '!')\n        end\n      end\n\n      it 'can be manually initiated' do\n        decorate(Author, :specialty)\n        expect(subject).to  respond_to(:specialties)\n        expect(subject).to  respond_to(:specialties_keys)\n        expect(subject).to  respond_to(:specialties_texts)\n        expect(subject).to  respond_to(:specialties_options)\n        expect(instance).to respond_to(:specialty_text)\n\n        Enum::Specialties.values.each do |value|\n          expect(subject).to  respond_to(value)\n          expect(instance).to respond_to(value + '?')\n          expect(instance).to respond_to(value + '!')\n        end\n      end\n    end\n\n    context 'with prefix' do\n      before(:each) { decorate(Author, :specialty, prefix: 'in') }\n      subject { Author }\n      let(:instance) { FactoryBot.build(:author) }\n\n      it 'creates all methods correctly' do\n        expect(subject).to  respond_to(:specialties)\n        expect(subject).to  respond_to(:specialties_keys)\n        expect(subject).to  respond_to(:specialties_texts)\n        expect(subject).to  respond_to(:specialties_options)\n        expect(instance).to respond_to(:specialty_text)\n\n        subject.specialties.each do |value|\n          expect(subject).to  respond_to('in_' + value)\n          expect(instance).to respond_to('in_' + 
value + '?')\n          expect(instance).to respond_to('in_' + value + '!')\n        end\n      end\n    end\n\n    context 'with suffix, only, and except' do\n      before(:each) do\n        decorate(Author, :specialty, suffix: 'expert', only: %w(books movies), except: 'books')\n      end\n\n      subject { Author }\n      let(:instance) { FactoryBot.build(:author) }\n\n      it 'creates only the requested methods' do\n        expect(subject).to  respond_to('movies_expert')\n        expect(instance).to respond_to('movies_expert?')\n        expect(instance).to respond_to('movies_expert!')\n\n        expect(subject).to_not  respond_to('books_expert')\n        expect(instance).to_not respond_to('books_expert?')\n        expect(instance).to_not respond_to('books_expert!')\n\n        expect(subject).to_not  respond_to('plays_expert')\n        expect(instance).to_not respond_to('plays_expert?')\n        expect(instance).to_not respond_to('plays_expert!')\n\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/full_text_seach_test.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'FullTextSearch' do\n  context 'on builder' do\n    let(:builder) { Torque::PostgreSQL::Attributes::Builder }\n\n    describe '.to_search_weights' do\n      it 'works with a single column' do\n        expect(builder.to_search_weights('title')).to eq({ 'title' => 'A' })\n        expect(builder.to_search_weights(:title)).to eq({ 'title' => 'A' })\n      end\n\n      it 'works with an array of columns' do\n        value = { 'title' => 'A', 'content' => 'B' }\n        expect(builder.to_search_weights(%w[title content])).to eq(value)\n        expect(builder.to_search_weights(%i[title content])).to eq(value)\n      end\n\n      it 'works with a hash of columns and weights' do\n        value = { 'title' => 'A', 'content' => 'B', 'summary' => 'C' }\n        expect(builder.to_search_weights(value.transform_keys(&:to_sym))).to eq(value)\n      end\n\n      it 'works with a hash of columns and invalid weights' do\n        value = { 'title' => 'X', 'content' => 'Y', 'summary' => 'Z' }\n        expect(builder.to_search_weights(value.transform_keys(&:to_sym))).to eq(value)\n      end\n    end\n\n    describe '.to_search_vector_operation' do\n      it 'builds a simple one' do\n        result = builder.to_search_vector_operation('english', { 'title' => 'A' })\n        expect(result.to_sql).to eq(\"TO_TSVECTOR('english', COALESCE(title, ''))\")\n      end\n\n      it 'builds with 2 columns' do\n        columns = { 'title' => 'A', 'content' => 'B' }\n        result = builder.to_search_vector_operation('english', columns)\n        expect(result.to_sql).to eq(<<~SQL.squish)\n          SETWEIGHT(TO_TSVECTOR('english', COALESCE(title, '')), 'A') ||\n          SETWEIGHT(TO_TSVECTOR('english', COALESCE(content, '')), 'B')\n        SQL\n      end\n\n      it 'builds with a dynamic language' do\n        columns = { 'title' => 'A', 'content' => 'B' }\n        result = builder.to_search_vector_operation(:lang, columns)\n        
expect(result.to_sql).to eq(<<~SQL.squish)\n          SETWEIGHT(TO_TSVECTOR(lang, COALESCE(title, '')), 'A') ||\n          SETWEIGHT(TO_TSVECTOR(lang, COALESCE(content, '')), 'B')\n        SQL\n      end\n    end\n\n    describe '.search_vector_options' do\n      it 'correctly translates the settings' do\n        options = builder.search_vector_options(columns: 'title')\n        expect(options).to eq(\n          type: :tsvector,\n          as: \"TO_TSVECTOR('english', COALESCE(title, ''))\",\n          stored: true,\n        )\n      end\n\n      it 'properly adds the index type' do\n        options = builder.search_vector_options(columns: 'title', index: true)\n        expect(options).to eq(\n          type: :tsvector,\n          as: \"TO_TSVECTOR('english', COALESCE(title, ''))\",\n          stored: true,\n          index: { using: :gin },\n        )\n      end\n    end\n  end\n\n  context 'on schema dumper' do\n    let(:connection) { ActiveRecord::Base.connection }\n    let(:source) { ActiveRecord::Base.connection_pool }\n    let(:dump_result) do\n      ActiveRecord::SchemaDumper.dump(source, (dump_result = StringIO.new))\n      dump_result.string\n    end\n\n    it 'properly supports search language' do\n      parts = %{t.search_language \"lang\", default: \"english\", null: false}\n      expect(dump_result).to include(parts)\n    end\n\n    it 'properly translates a simple single search vector with embedded language' do\n      parts = 't.search_vector \"search_vector\", stored: true'\n      parts << ', language: :lang, columns: :title'\n      expect(dump_result).to include(parts)\n    end\n\n    it 'properly translates a simple multiple column search vector with language' do\n      parts = 't.search_vector \"search_vector\", stored: true'\n      parts << ', language: \"english\", columns: [:title, :content]'\n      expect(dump_result).to include(parts)\n    end\n\n    it 'supports a custom definition of weights' do\n      connection.create_table :custom_search 
do |t|\n        t.string :title\n        t.string :content\n        t.string :subtitle\n        t.search_vector :sample_a, columns: {\n          title: 'A',\n          subtitle: 'A',\n          content: 'B',\n        }\n        t.search_vector :sample_b, columns: {\n          title: 'A',\n          subtitle: 'C',\n          content: 'D',\n        }\n        t.search_vector :sample_c, columns: {\n          title: 'C',\n          subtitle: 'B',\n          content: 'A',\n        }\n      end\n\n      parts = 't.search_vector \"sample_a\", stored: true'\n      parts << ', language: \"english\", columns: { title: \"A\", subtitle: \"A\", content: \"B\" }'\n      expect(dump_result).to include(parts)\n\n      parts = 't.search_vector \"sample_b\", stored: true'\n      parts << ', language: \"english\", columns: { title: \"A\", subtitle: \"C\", content: \"D\" }'\n      expect(dump_result).to include(parts)\n\n      parts = 't.search_vector \"sample_c\", stored: true'\n      parts << ', language: \"english\", columns: [:content, :subtitle, :title]'\n      expect(dump_result).to include(parts)\n    end\n  end\n\n  context 'on config' do\n    let(:base) { Course }\n    let(:scope) { 'full_text_search' }\n\n    let(:mod) { base.singleton_class.included_modules.first }\n\n    after { mod.send(:undef_method, scope) if scope.present? 
}\n\n    it 'has the initialization method' do\n      scope.replace('')\n      expect(base).to respond_to(:torque_search_for)\n    end\n\n    it 'properly generates the search scope' do\n      base.torque_search_for(:search_vector)\n      expect(base.all).to respond_to(:full_text_search)\n    end\n\n    it 'works with prefix and suffix' do\n      scope.replace('custom_full_text_search_scope')\n      base.torque_search_for(:search_vector, prefix: 'custom', suffix: 'scope')\n      expect(base.all).to respond_to(:custom_full_text_search_scope)\n    end\n  end\n\n  context 'on relation' do\n    let(:base) { Course }\n    let(:scope) { 'full_text_search' }\n\n    let(:mod) { base.singleton_class.included_modules.first }\n\n    before { Course.torque_search_for(:search_vector) }\n    after { mod.send(:undef_method, :full_text_search) }\n\n    it 'performs a simple query' do\n      result = Course.full_text_search('test')\n      parts = 'SELECT \"courses\".* FROM \"courses\"'\n      parts << ' WHERE \"courses\".\"search_vector\" @@'\n      parts << \" PHRASETO_TSQUERY('english', 'test')\"\n      expect(result.to_sql).to eql(parts)\n    end\n\n    it 'can include the order' do\n      result = Course.full_text_search('test', order: true)\n      parts = 'SELECT \"courses\".* FROM \"courses\"'\n      parts << ' WHERE \"courses\".\"search_vector\" @@'\n      parts << \" PHRASETO_TSQUERY('english', 'test')\"\n      parts << ' ORDER BY TS_RANK(\"courses\".\"search_vector\",'\n      parts << \" PHRASETO_TSQUERY('english', 'test')) ASC\"\n      expect(result.to_sql).to eql(parts)\n    end\n\n    it 'can include the order descending' do\n      result = Course.full_text_search('test', order: :desc)\n      parts = 'SELECT \"courses\".* FROM \"courses\"'\n      parts << ' WHERE \"courses\".\"search_vector\" @@'\n      parts << \" PHRASETO_TSQUERY('english', 'test')\"\n      parts << ' ORDER BY TS_RANK(\"courses\".\"search_vector\",'\n      parts << \" PHRASETO_TSQUERY('english', 
'test')) DESC\"\n      expect(result.to_sql).to eql(parts)\n    end\n\n    it 'can include the rank' do\n      result = Course.full_text_search('test', rank: true)\n      parts = 'SELECT \"courses\".*, TS_RANK(\"courses\".\"search_vector\",'\n      parts << \" PHRASETO_TSQUERY('english', 'test')) AS rank\"\n      parts << ' FROM \"courses\" WHERE \"courses\".\"search_vector\" @@'\n      parts << \" PHRASETO_TSQUERY('english', 'test')\"\n      expect(result.to_sql).to eql(parts)\n    end\n\n    it 'can include the rank named differently' do\n      result = Course.full_text_search('test', rank: :custom_rank)\n      parts = 'SELECT \"courses\".*, TS_RANK(\"courses\".\"search_vector\",'\n      parts << \" PHRASETO_TSQUERY('english', 'test')) AS custom_rank\"\n      parts << ' FROM \"courses\" WHERE \"courses\".\"search_vector\" @@'\n      parts << \" PHRASETO_TSQUERY('english', 'test')\"\n      expect(result.to_sql).to eql(parts)\n    end\n\n    it 'can use default query mode' do\n      result = Course.full_text_search('test', mode: :default)\n      parts = 'SELECT \"courses\".* FROM \"courses\"'\n      parts << ' WHERE \"courses\".\"search_vector\" @@'\n      parts << \" TO_TSQUERY('english', 'test')\"\n      expect(result.to_sql).to eql(parts)\n    end\n\n    it 'can use plain query mode' do\n      result = Course.full_text_search('test', mode: :plain)\n      parts = 'SELECT \"courses\".* FROM \"courses\"'\n      parts << ' WHERE \"courses\".\"search_vector\" @@'\n      parts << \" PLAINTO_TSQUERY('english', 'test')\"\n      expect(result.to_sql).to eql(parts)\n    end\n\n    it 'can use web query mode' do\n      result = Course.full_text_search('test', mode: :web)\n      parts = 'SELECT \"courses\".* FROM \"courses\"'\n      parts << ' WHERE \"courses\".\"search_vector\" @@'\n      parts << \" WEBSEARCH_TO_TSQUERY('english', 'test')\"\n      expect(result.to_sql).to eql(parts)\n    end\n\n    it 'can use a attribute as the language' do\n      result = 
Course.full_text_search('test', language: :lang)\n      parts = 'SELECT \"courses\".* FROM \"courses\"'\n      parts << ' WHERE \"courses\".\"search_vector\" @@'\n      parts << %{ PHRASETO_TSQUERY(\"courses\".\"lang\", 'test')}\n      expect(result.to_sql).to eql(parts)\n    end\n\n    it 'can call a method to pull the language' do\n      Course.define_singleton_method(:search_language) { 'portuguese' }\n      result = Course.full_text_search('test', language: :search_language)\n      parts = 'SELECT \"courses\".* FROM \"courses\"'\n      parts << ' WHERE \"courses\".\"search_vector\" @@'\n      parts << \" PHRASETO_TSQUERY('portuguese', 'test')\"\n      expect(result.to_sql).to eql(parts)\n      Course.singleton_class.undef_method(:search_language)\n    end\n\n    it 'properly binds all provided values' do\n      query = Course.full_text_search('test')\n      sql, binds = get_query_with_binds { query.load }\n      expect(sql).to include(\"PHRASETO_TSQUERY($1, $2)\")\n      expect(binds.first.value).to eq('english')\n      expect(binds.second.value).to eq('test')\n    end\n\n    it 'raises an error when the language is not found' do\n      expect do\n        Course.full_text_search('test', language: '')\n      end.to raise_error(ArgumentError, /Unable to determine language/)\n    end\n\n    it 'raises an error when the mode is invalid' do\n      expect do\n        Course.full_text_search('test', mode: :invalid)\n      end.to raise_error(ArgumentError, /Invalid mode :invalid for full text search/)\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/function_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'Function' do\n  let(:helper) { Torque::PostgreSQL::FN }\n  let(:conn) { ActiveRecord::Base.connection }\n  let(:visitor) { ::Arel::Visitors::PostgreSQL.new(conn) }\n  let(:collector) { ::Arel::Collectors::SQLString }\n\n  context 'on helper' do\n    it 'helps creating a bind' do\n      type = ::ActiveRecord::Type::String.new\n      expect(helper.bind(:foo, 'test', type)).to be_a(::Arel::Nodes::BindParam)\n    end\n\n    it 'helps creating a bind for a model attribute' do\n      expect(helper.bind_for(Video, :title, 'test')).to be_a(::Arel::Nodes::BindParam)\n    end\n\n    it 'helps creating a bind for an arel attribute' do\n      attr = Video.arel_table['title']\n      expect(helper.bind_with(attr, 'test')).to be_a(::Arel::Nodes::BindParam)\n    end\n\n    it 'helps concatenating arguments' do\n      values = %w[a b c].map(&::Arel.method(:sql))\n\n      # Unable to just call .sql with a simple thing\n      visited = visitor.accept(helper.concat(values[0]), collector.new)\n      expect(visited.value).to eq(\"a\")\n\n      # 2+ we can call .sql directly\n      expect(helper.concat(values[0], values[1]).to_sql).to eq(\"a || b\")\n      expect(helper.concat(values[0], values[1], values[2]).to_sql).to eq(\"a || b || c\")\n    end\n\n    it 'helps building any other function' do\n      values = %w[a b c].map(&::Arel.method(:sql))\n      expect(helper).to respond_to(:coalesce)\n      expect(helper.coalesce(values[0], values[1]).to_sql).to eq(\"COALESCE(a, b)\")\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/geometric_builder_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'Geometries' do\n  context 'on build' do\n    let(:klass) do\n      klass = Class.new(Torque::PostgreSQL::GeometryBuilder)\n      klass.define_singleton_method(:name) { 'TestSample' }\n      klass.const_set('PIECES', %i[a b c d].freeze)\n      klass.const_set('FORMATION', '(%s, %s, <%s, {%s}>)'.freeze)\n      klass\n    end\n\n    let(:instance) { klass.new }\n\n    context '#type' do\n      it 'originally does not have the constant defined' do\n        expect(klass.constants).not_to include('TYPE')\n      end\n\n      it 'creates the type constant based on the name' do\n        expect(instance.type).to be_eql(:test_sample)\n        expect(klass.constants).to include(:TYPE)\n        expect(klass::TYPE).to be_eql(:test_sample)\n      end\n\n      it 'returns the constant value' do\n        klass.const_set('TYPE', 'another_type')\n        expect(instance.type).to be_eql('another_type')\n      end\n    end\n\n    context '#pieces' do\n      it 'returns the definition pieces' do\n        expect(instance.pieces).to be_eql([:a, :b, :c, :d])\n      end\n\n      it 'returns whatever is in the constant' do\n        klass.send(:remove_const, 'PIECES')\n        klass.const_set('PIECES', %i[a].freeze)\n        expect(instance.pieces).to be_eql([:a])\n      end\n    end\n\n    context '#formation' do\n      it 'returns the definition set' do\n        expect(instance.formation).to be_eql(\"(%s, %s, <%s, {%s}>)\")\n      end\n\n      it 'returns whatever is in the constant' do\n        klass.send(:remove_const, 'FORMATION')\n        klass.const_set('FORMATION', '(<%s>)'.freeze)\n        expect(instance.formation).to be_eql(\"(<%s>)\")\n      end\n    end\n\n    context '#cast' do\n      let(:config_class) { double }\n\n      before { allow(instance).to receive(:config_class).and_return(config_class) }\n\n      it 'accepts string values' do\n        expect(instance.cast('')).to be_nil\n\n        expect(config_class).to 
receive(:new).with(1, 2, 3, 4).and_return(4)\n        expect(instance.cast('1, 2, 3, 4')).to be_eql(4)\n\n        expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(8)\n        expect(instance.cast('(1, {2}, <3>, 4)')).to be_eql(8)\n\n        expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(7)\n        expect(instance.cast('1, 2, 3, 4, 5, 6')).to be_eql(7)\n\n        expect(config_class).to receive(:new).with(1.0, 2.0, 3.0, 4.0).and_return(1)\n        expect(instance.cast('1.0, 2.0, 3.0, 4.0')).to be_eql(1)\n\n        expect { instance.cast(['6 6 6']) }.to raise_error(RuntimeError, 'Invalid format')\n      end\n\n      it 'accepts hash values' do\n        expect(instance.cast({})).to be_nil\n\n        expect { instance.cast({ 'a' => 1, 'b' => 2 }) }.to raise_error(RuntimeError, 'Invalid format')\n\n        expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(4)\n        expect(instance.cast({ 'a' => 1, 'b' => 2 , 'c' => 3, 'd' => 4})).to be_eql(4)\n\n        expect(config_class).to receive(:new).with(1.0, 2.0, 3.0, 4.0).and_return(5)\n        expect(instance.cast({ 'a' => 1.0, 'b' => 2.0, 'c' => 3.0, 'd' => 4.0})).to be_eql(5)\n\n        expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(2)\n        expect(instance.cast({ a: 1, b: 2 , c: 3, d: 4, e: 5, f: 6})).to be_eql(2)\n      end\n\n      it 'accepts array values' do\n        expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(4)\n        expect(instance.cast([1, 2, 3, 4])).to be_eql(4)\n\n        expect(config_class).to receive(:new).with(1.1, 1.2, 1.3, 1.4).and_return(9)\n        expect(instance.cast(['1.1', '1.2', '1.3', '1.4'])).to be_eql(9)\n\n        expect(config_class).to receive(:new).with(6, 5, 4, 3).and_return(2)\n        expect(instance.cast([6, 5, 4, 3, 2, 1])).to be_eql(2)\n\n        expect(instance.cast([])).to be_nil\n\n        expect { instance.cast([6, 5, 4]) }.to raise_error(RuntimeError, 'Invalid format')\n      end\n  
  end\n\n    context '#serialize' do\n      before { allow(instance).to receive(:config_class).and_return(OpenStruct) }\n\n      it 'return value nil' do\n        expect(instance.serialize(nil)).to be_nil\n      end\n\n      it 'accepts config class' do\n        expect(instance.serialize(OpenStruct.new)).to be_nil\n        expect(instance.serialize(OpenStruct.new(a: 1, b: 2, c: 3, d: 4))).to be_eql('(1, 2, <3, {4}>)')\n        expect(instance.serialize(OpenStruct.new(a: 1, b: 2, c: 3, d: 4, e: 5))).to be_eql('(1, 2, <3, {4}>)')\n      end\n\n      it 'accepts hash value' do\n        expect { instance.cast({a: 1, b: 2, c: 3}) }.to raise_error(RuntimeError, 'Invalid format')\n        expect(instance.serialize({a: 1, b: 2, c: 3, d: 4})).to be_eql('(1, 2, <3, {4}>)')\n        expect(instance.serialize({a: 1, b: 2, c: 3, d: 4, e: 5, f: 6})).to be_eql('(1, 2, <3, {4}>)')\n      end\n\n      it 'accepts array value' do\n        expect { instance.serialize([6, 5, 4]) }.to raise_error(RuntimeError, 'Invalid format')\n        expect(instance.serialize([1, 2, 3, 4])).to be_eql('(1, 2, <3, {4}>)')\n        expect(instance.serialize([5, 4, 3, 2, 1, 0])).to be_eql('(5, 4, <3, {2}>)')\n      end\n\n    end\n\n    context '#deserialize' do\n      let(:config_class) { double }\n\n      before { allow(instance).to receive(:config_class).and_return(config_class) }\n\n      it 'return value nil' do\n        expect(instance.deserialize(nil)).to be_nil\n      end\n\n      it 'accept correct format' do\n        expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(6)\n        expect(instance.deserialize('(1, 2, <3, {4}>)')).to be_eql(6)\n      end\n    end\n\n    context '#type_cast_for_schema' do\n      before { allow(instance).to receive(:config_class).and_return(OpenStruct) }\n\n      it 'returns the array for schema' do\n        result = instance.type_cast_for_schema(OpenStruct.new(a: 1, b: 2, c: 3, d: 4))\n        expect(result).to be_eql([1, 2, 3, 4])\n      end\n    
end\n  end\n\n  context 'on box' do\n    let(:klass) { Torque::PostgreSQL::Adapter::OID::Box }\n    let(:value_klass) { Torque::PostgreSQL::Box }\n    let(:instance) { klass.new }\n    let(:value_instance) { instance.cast([1, 2, 3, 4]) }\n\n    before { allow(instance).to receive(:config_class).and_return(value_klass) }\n\n    it '#points' do\n      mock_klass = Struct.new(:a, :b)\n      Torque::PostgreSQL.config.geometry.point_class = mock_klass\n\n      result = value_instance.points\n      expect(result).to be_a(Array)\n      expect(result.size).to be_eql(4)\n      expect(result).to all(be_a(mock_klass))\n\n      expect(result[0].a).to be_eql(1.0)\n      expect(result[0].b).to be_eql(2.0)\n      expect(result[1].a).to be_eql(1.0)\n      expect(result[1].b).to be_eql(4.0)\n      expect(result[2].a).to be_eql(3.0)\n      expect(result[2].b).to be_eql(2.0)\n      expect(result[3].a).to be_eql(3.0)\n      expect(result[3].b).to be_eql(4.0)\n    end\n  end\n\n  context 'on circle' do\n    let(:klass) { Torque::PostgreSQL::Adapter::OID::Circle }\n    let(:value_klass) { Torque::PostgreSQL::Circle }\n    let(:instance) { klass.new }\n    let(:value_instance) { instance.cast([1, 2, 3]) }\n\n    before { allow(instance).to receive(:config_class).and_return(value_klass) }\n\n    it '#center' do\n      mock_klass = Struct.new(:a, :b)\n      Torque::PostgreSQL.config.geometry.point_class = mock_klass\n\n      result = value_instance.center\n      expect(result).to be_a(mock_klass)\n      expect(result.a).to be_eql(1.0)\n      expect(result.b).to be_eql(2.0)\n    end\n\n    it '#center=' do\n      mock_klass = Struct.new(:x, :y)\n      Torque::PostgreSQL.config.geometry.point_class = mock_klass\n\n      value_instance.center = [1, 2]\n      expect(value_instance.x).to be_eql(1)\n      expect(value_instance.y).to be_eql(2)\n\n      value_instance.center = mock_klass.new(3, 4)\n      expect(value_instance.x).to be_eql(3)\n      expect(value_instance.y).to be_eql(4)\n    end\n  
end\nend\n"
  },
  {
    "path": "spec/tests/has_many_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'HasMany' do\n  context 'on builder' do\n    let(:builder) { ActiveRecord::Associations::Builder::HasMany }\n\n    it 'adds the array option' do\n      expect(builder.send(:valid_options, {})).to include(:array)\n    end\n  end\n\n  context 'on original' do\n    let(:other) { Text }\n    let(:key) { :texts }\n\n    before { User.has_many :texts }\n    subject { User.create(name: 'User 1') }\n    after { User._reflections = {} }\n\n    it 'has the method' do\n      expect(subject).to respond_to(:texts)\n      expect(subject._reflections).to include(key)\n    end\n\n    it 'has correct foreign key' do\n      item = subject._reflections[key]\n      expect(item.foreign_key).to be_eql('user_id')\n    end\n\n    it 'loads associated records' do\n      expect(subject.texts.to_sql).to match(Regexp.new(<<-SQL.squish))\n        SELECT \"texts\"\\\\.\\\\* FROM \"texts\" WHERE \\\\(?\"texts\"\\\\.\"user_id\" = #{subject.id}\\\\)?\n      SQL\n\n      expect(subject.texts.load).to be_a(ActiveRecord::Associations::CollectionProxy)\n      expect(subject.texts.to_a).to be_eql([])\n    end\n\n    it 'can be marked as loaded' do\n      expect(subject.texts.loaded?).to be_eql(false)\n      expect(subject.texts).to respond_to(:load_target)\n      expect(subject.texts.load_target).to be_eql([])\n      expect(subject.texts.loaded?).to be_eql(true)\n    end\n\n    it 'can find specific records' do\n      records = FactoryBot.create_list(:text, 10, user_id: subject.id)\n      ids = records.map(&:id).sample(5)\n\n      expect(subject.texts).to respond_to(:find)\n      records = subject.texts.find(*ids)\n\n      expect(records.size).to be_eql(5)\n      expect(records.map(&:id).sort).to be_eql(ids.sort)\n    end\n\n    it 'can return last n records' do\n      records = FactoryBot.create_list(:text, 10, user_id: subject.id)\n      ids = records.map(&:id).last(5)\n\n      expect(subject.texts).to respond_to(:last)\n      records = 
subject.texts.last(5)\n\n      expect(records.size).to be_eql(5)\n      expect(records.map(&:id).sort).to be_eql(ids.sort)\n    end\n\n    it 'can return first n records' do\n      records = FactoryBot.create_list(:text, 10, user_id: subject.id)\n      ids = records.map(&:id).first(5)\n\n      expect(subject.texts).to respond_to(:take)\n      records = subject.texts.take(5)\n\n      expect(records.size).to be_eql(5)\n      expect(records.map(&:id).sort).to be_eql(ids.sort)\n    end\n\n    it 'can build an associated record' do\n      record = subject.texts.build(content: 'Test')\n      expect(record).to be_a(other)\n      expect(record).not_to be_persisted\n      expect(record.content).to be_eql('Test')\n      expect(record.user_id).to be_eql(subject.id)\n\n      expect(subject.save).to be_truthy\n      expect(subject.texts.size).to be_eql(1)\n    end\n\n    it 'can create an associated record' do\n      record = subject.texts.create(content: 'Test')\n      expect(subject.texts).to respond_to(:create!)\n\n      expect(record).to be_a(other)\n      expect(record).to be_persisted\n      expect(record.content).to be_eql('Test')\n      expect(record.user_id).to be_eql(subject.id)\n    end\n\n    it 'can concat records' do\n      FactoryBot.create(:text, user_id: subject.id)\n      expect(subject.texts.size).to be_eql(1)\n\n      subject.texts.concat(other.new(content: 'Test'))\n      expect(subject.texts.size).to be_eql(2)\n      expect(subject.texts.last.content).to be_eql('Test')\n    end\n\n    it 'can replace records' do\n      FactoryBot.create(:text, user_id: subject.id)\n      expect(subject.texts.size).to be_eql(1)\n\n      subject.texts.replace([other.new(content: 'Test 1'), other.new(content: 'Test 2')])\n      expect(subject.texts.size).to be_eql(2)\n      expect(subject.texts[0].content).to be_eql('Test 1')\n      expect(subject.texts[1].content).to be_eql('Test 2')\n    end\n\n    it 'can delete all records' do\n      FactoryBot.create_list(:text, 5, 
user_id: subject.id)\n      expect(subject.texts.size).to be_eql(5)\n\n      subject.texts.delete_all\n      expect(subject.texts.size).to be_eql(0)\n    end\n\n    it 'can destroy all records' do\n      FactoryBot.create_list(:text, 5, user_id: subject.id)\n      expect(subject.texts.size).to be_eql(5)\n\n      subject.texts.destroy_all\n      expect(subject.texts.size).to be_eql(0)\n    end\n\n    it 'can have sum operations' do\n      result = FactoryBot.create_list(:text, 5, user_id: subject.id).map(&:id).reduce(:+)\n      expect(subject.texts).to respond_to(:sum)\n      expect(subject.texts.sum(:id)).to be_eql(result)\n    end\n\n    it 'can have a pluck operation' do\n      result = FactoryBot.create_list(:text, 5, user_id: subject.id).map(&:content).sort\n      expect(subject.texts).to respond_to(:pluck)\n      expect(subject.texts.pluck(:content).sort).to be_eql(result)\n    end\n\n    it 'can be markes as empty' do\n      expect(subject.texts).to respond_to(:empty?)\n      expect(subject.texts.empty?).to be_truthy\n\n      FactoryBot.create(:text, user_id: subject.id)\n      expect(subject.texts.empty?).to be_falsey\n    end\n\n    it 'can check if a record is included on the list' do\n      inside = FactoryBot.create(:text, user_id: subject.id)\n      outside = FactoryBot.create(:text)\n\n      expect(subject.texts).to respond_to(:include?)\n      expect(subject.texts.include?(inside)).to be_truthy\n      expect(subject.texts.include?(outside)).to be_falsey\n    end\n\n    it 'can append records' do\n      FactoryBot.create(:text, user_id: subject.id)\n      expect(subject.texts.size).to be_eql(1)\n\n      subject.texts << other.new(content: 'Test')\n      expect(subject.texts.size).to be_eql(2)\n      expect(subject.texts.last.content).to be_eql('Test')\n    end\n\n    it 'can clear records' do\n      FactoryBot.create(:text, user_id: subject.id)\n      expect(subject.texts.size).to be_eql(1)\n\n      subject.texts.clear\n      
expect(subject.texts.size).to be_eql(0)\n    end\n\n    it 'can reload records' do\n      expect(subject.texts.size).to be_eql(0)\n      FactoryBot.create(:text, user_id: subject.id)\n\n      expect(subject.texts.size).to be_eql(0)\n\n      subject.texts.reload\n      expect(subject.texts.size).to be_eql(1)\n    end\n\n    it 'can preload records' do\n      FactoryBot.create_list(:text, 5, user_id: subject.id)\n      entries = User.all.includes(:texts).load\n\n      expect(entries.size).to be_eql(1)\n      expect(entries.first.texts).to be_loaded\n      expect(entries.first.texts.size).to be_eql(5)\n    end\n\n    it 'can joins records' do\n      query = User.all.joins(:texts)\n      expect(query.to_sql).to match(/INNER JOIN \"texts\"/)\n      expect { query.load }.not_to raise_error\n    end\n\n    context 'with query constraint' do\n      let(:activity) { Activity.create! }\n\n      before do\n        skip('Only Rails 7.1 onwards') unless Post.respond_to?(:query_constraints)\n\n        Post.query_constraints :author_id, :id\n        Activity.query_constraints :author_id, :id\n        Activity.has_many :posts\n      end\n\n      after do\n        Post.instance_variable_set(:@has_query_constraints, false)\n        Post.instance_variable_set(:@query_constraints_list, nil)\n        Post.instance_variable_set(:@_query_constraints_list, nil)\n        Activity.instance_variable_set(:@has_query_constraints, false)\n        Activity.instance_variable_set(:@query_constraints_list, nil)\n        Activity.instance_variable_set(:@_query_constraints_list, nil)\n      end\n\n      it 'properly preload records' do\n        FactoryBot.create_list(:post, 5, activity: activity)\n        entries = Activity.all.includes(:posts).load\n\n        expect(entries.size).to be_eql(1)\n        expect(entries.first.posts).to be_loaded\n        expect(entries.first.posts.size).to be_eql(5)\n      end\n\n      it 'properly preload records using preloader' do\n        
FactoryBot.create_list(:post, 5, activity: activity)\n        entries = ActiveRecord::Associations::Preloader.new(\n          records: Activity.all,\n          associations: [:posts],\n        ).call.first.records_by_owner\n\n        expect(entries.size).to be_eql(1)\n        expect(entries.values.first.size).to be_eql(5)\n      end\n    end\n  end\n\n  context 'on array' do\n    let(:other) { Video }\n    let(:key) { :videos }\n\n    before { Tag.has_many :videos, array: true }\n    subject { Tag.create(name: 'A') }\n    after { Tag._reflections = {} }\n\n    it 'has the method' do\n      expect(subject).to respond_to(:videos)\n      expect(subject._reflections).to include(key)\n    end\n\n    it 'has correct foreign key' do\n      item = subject._reflections[key]\n      expect(item.foreign_key).to be_eql('tag_ids')\n    end\n\n    it 'loads associated records' do\n      expect(subject.videos.to_sql).to eq(<<~SQL.squish)\n        SELECT \"videos\".* FROM \"videos\" WHERE #{subject.id} = ANY(\"videos\".\"tag_ids\")\n      SQL\n\n      expect(subject.videos.load).to be_a(ActiveRecord::Associations::CollectionProxy)\n      expect(subject.videos.to_a).to be_eql([])\n    end\n\n    it 'uses binds instead of the literal value' do\n      query = subject.videos\n      sql, binds = get_query_with_binds { query.load }\n      expect(sql).to include('WHERE $1 = ANY(\"videos\".\"tag_ids\")')\n      expect(binds.first.value).to eq(subject.id)\n    end\n\n    it 'can be marked as loaded' do\n      expect(subject.videos.loaded?).to be_eql(false)\n      expect(subject.videos).to respond_to(:load_target)\n      expect(subject.videos.load_target).to be_eql([])\n      expect(subject.videos.loaded?).to be_eql(true)\n    end\n\n    it 'can find specific records' do\n      records = FactoryBot.create_list(:video, 10, tag_ids: [subject.id])\n      ids = records.map(&:id).sample(5)\n\n      expect(subject.videos).to respond_to(:find)\n      records = subject.videos.find(*ids)\n\n      
expect(records.size).to be_eql(5)\n      expect(records.map(&:id).sort).to be_eql(ids.sort)\n    end\n\n    it 'can return last n records' do\n      records = FactoryBot.create_list(:video, 10, tag_ids: [subject.id])\n      ids = records.map(&:id).last(5)\n\n      expect(subject.videos).to respond_to(:last)\n      records = subject.videos.last(5)\n\n      expect(records.size).to be_eql(5)\n      expect(records.map(&:id).sort).to be_eql(ids.sort)\n    end\n\n    it 'can return first n records' do\n      records = FactoryBot.create_list(:video, 10, tag_ids: [subject.id])\n      ids = records.map(&:id).first(5)\n\n      expect(subject.videos).to respond_to(:take)\n      records = subject.videos.take(5)\n\n      expect(records.size).to be_eql(5)\n      expect(records.map(&:id).sort).to be_eql(ids.sort)\n    end\n\n    it 'can build an associated record' do\n      record = subject.videos.build(title: 'Test')\n      expect(record).to be_a(other)\n      expect(record).not_to be_persisted\n      expect(record.title).to be_eql('Test')\n\n      expect(subject.save).to be_truthy\n      expect(record.tag_ids).to be_eql([subject.id])\n      expect(subject.videos.size).to be_eql(1)\n    end\n\n    it 'can create an associated record' do\n      record = subject.videos.create(title: 'Test')\n      expect(subject.videos).to respond_to(:create!)\n\n      expect(record).to be_a(other)\n      expect(record).to be_persisted\n      expect(record.title).to be_eql('Test')\n      expect(record.tag_ids).to be_eql([subject.id])\n    end\n\n    it 'can perist after accessed in after_create' do\n      other.belongs_to_many(:tags)\n      other.after_create { self.tags.to_a }\n\n      video = FactoryBot.create(:video)\n      subject.videos << video\n\n      expect(subject.reload.videos.size).to eql(1)\n      expect(video.reload.tags.size).to eql(1)\n\n      other.reset_callbacks(:create)\n      other._reflections = {}\n    end\n\n    it 'can concat records' do\n      FactoryBot.create(:video, 
tag_ids: [subject.id])\n      expect(subject.videos.size).to be_eql(1)\n\n      subject.videos.concat(other.new(title: 'Test'))\n      expect(subject.videos.size).to be_eql(2)\n      expect(subject.videos.last.title).to be_eql('Test')\n    end\n\n    it 'can replace records' do\n      FactoryBot.create(:video, tag_ids: [subject.id])\n      expect(subject.videos.size).to be_eql(1)\n\n      subject.videos.replace([other.new(title: 'Test 1'), other.new(title: 'Test 2')])\n      expect(subject.videos.size).to be_eql(2)\n      expect(subject.videos[0].title).to be_eql('Test 1')\n      expect(subject.videos[1].title).to be_eql('Test 2')\n    end\n\n    it 'can delete all records' do\n      FactoryBot.create_list(:video, 5, tag_ids: [subject.id])\n      expect(subject.videos.size).to be_eql(5)\n\n      subject.videos.delete_all\n      expect(subject.videos.size).to be_eql(0)\n    end\n\n    it 'can destroy all records' do\n      FactoryBot.create_list(:video, 5, tag_ids: [subject.id])\n      expect(subject.videos.size).to be_eql(5)\n\n      subject.videos.destroy_all\n      expect(subject.videos.size).to be_eql(0)\n    end\n\n    it 'can have sum operations' do\n      result = FactoryBot.create_list(:video, 5, tag_ids: [subject.id]).map(&:id).reduce(:+)\n      expect(subject.videos).to respond_to(:sum)\n      expect(subject.videos.sum(:id)).to be_eql(result)\n    end\n\n    it 'can have a pluck operation' do\n      result = FactoryBot.create_list(:video, 5, tag_ids: [subject.id]).map(&:title).sort\n      expect(subject.videos).to respond_to(:pluck)\n      expect(subject.videos.pluck(:title).sort).to be_eql(result)\n    end\n\n    it 'can be markes as empty' do\n      expect(subject.videos).to respond_to(:empty?)\n      expect(subject.videos.empty?).to be_truthy\n\n      FactoryBot.create(:video, tag_ids: [subject.id])\n      expect(subject.videos.empty?).to be_falsey\n    end\n\n    it 'can check if a record is included on the list' do\n      inside = 
FactoryBot.create(:video, tag_ids: [subject.id])\n      outside = FactoryBot.create(:video)\n\n      expect(subject.videos).to respond_to(:include?)\n      expect(subject.videos.include?(inside)).to be_truthy\n      expect(subject.videos.include?(outside)).to be_falsey\n    end\n\n    it 'can append records' do\n      FactoryBot.create(:video, tag_ids: [subject.id])\n      expect(subject.videos.size).to be_eql(1)\n\n      subject.videos << other.new(title: 'Test')\n      expect(subject.videos.size).to be_eql(2)\n      expect(subject.videos.last.title).to be_eql('Test')\n    end\n\n    it 'can clear records' do\n      FactoryBot.create(:video, tag_ids: [subject.id])\n      expect(subject.videos.size).to be_eql(1)\n\n      subject.videos.clear\n      expect(subject.videos.size).to be_eql(0)\n    end\n\n    it 'can reload records' do\n      expect(subject.videos.size).to be_eql(0)\n      FactoryBot.create(:video, tag_ids: [subject.id])\n\n      expect(subject.videos.size).to be_eql(0)\n\n      subject.videos.reload\n      expect(subject.videos.size).to be_eql(1)\n    end\n\n    it 'can preload records' do\n      FactoryBot.create_list(:video, 5, tag_ids: [subject.id])\n      entries = Tag.all.includes(:videos).load\n\n      expect(entries.size).to be_eql(1)\n      expect(entries.first.videos).to be_loaded\n      expect(entries.first.videos.size).to be_eql(5)\n    end\n\n    it 'can joins records' do\n      query = Tag.all.joins(:videos)\n      expect(query.to_sql).to match(/INNER JOIN \"videos\"/)\n      expect { query.load }.not_to raise_error\n    end\n  end\n\n  context 'using uuid' do\n    let(:connection) { ActiveRecord::Base.connection }\n    let(:game) { Class.new(ActiveRecord::Base) }\n    let(:player) { Class.new(ActiveRecord::Base) }\n\n    # TODO: Set as a shred example\n    before do\n      connection.create_table(:players, id: :uuid) { |t| t.string :name }\n      connection.create_table(:games, id: :uuid) { |t| t.uuid :player_ids, array: true }\n\n      
game.table_name = 'games'\n      player.table_name = 'players'\n      player.has_many :games, array: true, anonymous_class: game,\n        inverse_of: false, foreign_key: :player_ids\n    end\n\n    subject { player.create }\n\n    it 'loads associated records' do\n      expect(subject.games.to_sql).to eq(<<~SQL.squish)\n        SELECT \"games\".* FROM \"games\"\n        WHERE '#{subject.id}' = ANY(\"games\".\"player_ids\")\n      SQL\n\n      expect(subject.games.load).to be_a(ActiveRecord::Associations::CollectionProxy)\n      expect(subject.games.to_a).to be_eql([])\n    end\n\n    it 'uses binds instead of the literal value' do\n      query = subject.games\n      sql, binds = get_query_with_binds { query.load }\n      expect(sql).to include('WHERE $1 = ANY(\"games\".\"player_ids\")')\n      expect(binds.first.value).to eq(subject.id)\n    end\n\n    it 'can preload records' do\n      5.times { game.create(player_ids: [subject.id]) }\n      entries = player.all.includes(:games).load\n\n      expect(entries.size).to be_eql(1)\n      expect(entries.first.games).to be_loaded\n      expect(entries.first.games.size).to be_eql(5)\n    end\n\n    it 'can joins records' do\n      query = player.all.joins(:games)\n      expect(query.to_sql).to match(/INNER JOIN \"games\"/)\n      expect { query.load }.not_to raise_error\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/insert_all_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'InsertAll' do\n  context 'on executing' do\n    before do\n      ActiveRecord::InsertAll.send(:public, :to_sql)\n      allow_any_instance_of(ActiveRecord::InsertAll).to receive(:execute, &:to_sql)\n    end\n\n    subject { Tag }\n\n    let(:entries) { [{ name: 'A' }, { name: 'B' }] }\n\n    it 'does not mess with insert_all' do\n      result = subject.insert_all(entries)\n      expect(result.squish).to be_eql(<<~SQL.squish)\n        INSERT INTO \"tags\" (\"name\") VALUES ('A'), ('B')\n        ON CONFLICT  DO NOTHING RETURNING \"id\"\n      SQL\n\n      result = subject.insert_all(entries, returning: %i[name])\n      expect(result.squish).to be_eql(<<~SQL.squish)\n        INSERT INTO \"tags\" (\"name\") VALUES ('A'), ('B')\n        ON CONFLICT  DO NOTHING RETURNING \"name\"\n      SQL\n\n      result = subject.insert_all(entries, returning: %i[id name])\n      expect(result.squish).to be_eql(<<~SQL.squish)\n        INSERT INTO \"tags\" (\"name\") VALUES ('A'), ('B')\n        ON CONFLICT  DO NOTHING RETURNING \"id\",\"name\"\n      SQL\n    end\n\n    it 'does not mess with insert_all!' 
do\n      result = subject.insert_all!(entries)\n      expect(result.squish).to be_eql(<<~SQL.squish)\n        INSERT INTO \"tags\" (\"name\") VALUES ('A'), ('B') RETURNING \"id\"\n      SQL\n\n      result = subject.insert_all!(entries, returning: %i[name])\n      expect(result.squish).to be_eql(<<~SQL.squish)\n        INSERT INTO \"tags\" (\"name\") VALUES ('A'), ('B') RETURNING \"name\"\n      SQL\n    end\n\n    it 'does not mess with upsert without where' do\n      result = subject.upsert_all(entries)\n      expect(result.squish).to be_eql(<<~SQL.squish)\n        INSERT INTO \"tags\" (\"name\") VALUES ('A'), ('B')\n        ON CONFLICT (\"id\") DO UPDATE SET \"name\"=excluded.\"name\"\n        RETURNING \"id\"\n      SQL\n\n      result = subject.upsert_all(entries, returning: %i[name])\n      expect(result.squish).to be_eql(<<~SQL.squish)\n        INSERT INTO \"tags\" (\"name\") VALUES ('A'), ('B')\n        ON CONFLICT (\"id\") DO UPDATE SET \"name\"=excluded.\"name\"\n        RETURNING \"name\"\n      SQL\n    end\n\n    it 'does add the where condition without the returning clause' do\n      result = subject.upsert_all(entries, returning: false, where: '1=1')\n      expect(result.squish).to be_eql(<<~SQL.squish)\n        INSERT INTO \"tags\" (\"name\") VALUES ('A'), ('B')\n        ON CONFLICT (\"id\") DO UPDATE SET \"name\"=excluded.\"name\"\n        WHERE 1=1\n      SQL\n    end\n\n    it 'does add the where condition with the returning clause' do\n      result = subject.upsert_all(entries, where: '1=1')\n      expect(result.squish).to be_eql(<<~SQL.squish)\n        INSERT INTO \"tags\" (\"name\") VALUES ('A'), ('B')\n        ON CONFLICT (\"id\") DO UPDATE SET \"name\"=excluded.\"name\"\n        WHERE 1=1 RETURNING \"id\"\n      SQL\n    end\n\n    xit 'dows work with model-based where clause' do\n      result = subject.upsert_all(entries, where: Tag.where(name: 'C'))\n      expect(result.squish).to be_eql(<<~SQL.squish)\n        INSERT INTO \"tags\" 
(\"name\") VALUES ('A'), ('B')\n        ON CONFLICT (\"id\") DO UPDATE SET \"name\"=excluded.\"name\"\n        WHERE \"tags\".\"name\" = 'C' RETURNING \"id\"\n      SQL\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/interval_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'Interval' do\n  let(:table_definition) { ActiveRecord::ConnectionAdapters::PostgreSQL::TableDefinition }\n  let(:connection) { ActiveRecord::Base.connection }\n  let(:source) { ActiveRecord::Base.connection_pool }\n\n  context 'on settings' do\n    it 'must be set to ISO 8601' do\n      expect(connection.select_value('SHOW IntervalStyle')).to eql('iso_8601')\n    end\n  end\n\n  context 'on table definition' do\n    subject { table_definition.new(connection, 'articles') }\n\n    it 'has the interval method' do\n      expect(subject).to respond_to(:interval)\n    end\n\n    it 'can define an interval column' do\n      subject.interval('duration')\n      expect(subject['duration'].name).to eql('duration')\n      expect(subject['duration'].type).to eql(:interval)\n    end\n  end\n\n  context 'on schema' do\n    it 'can be used on tables too' do\n      dump_io = StringIO.new\n      ActiveRecord::SchemaDumper.dump(source, dump_io)\n      expect(dump_io.string).to match /t\\.interval +\"duration\"/\n    end\n  end\n\n  context 'on OID' do\n    let(:reference) { 1.year + 2.months + 3.days + 4.hours + 5.minutes + 6.seconds }\n    subject { Torque::PostgreSQL::Adapter::OID::Interval.new }\n\n    context 'on deserialize' do\n      it 'returns nil' do\n        expect(subject.deserialize(nil)).to be_nil\n      end\n\n      it 'returns duration' do\n        value = subject.deserialize('P1Y2M3DT4H5M6S')\n\n        expect(value).to be_a(ActiveSupport::Duration)\n        expect(value).to eq(reference)\n      end\n    end\n\n    context 'on serialize' do\n      it 'returns nil' do\n        expect(subject.serialize(nil)).to be_nil\n      end\n\n      it 'returns seconds as string' do\n        expect(subject.serialize(3600.seconds)).to eq('PT3600S')\n      end\n\n      it 'retruns sample as string' do\n        expect(subject.serialize(reference)).to eq('P1Y2M3DT4H5M6S')\n      end\n\n      it 'transforms weeks into days' do\n   
     reference = subject.cast(1000000)\n        expect(subject.serialize(reference)).to eq('P11DT13H46M40S')\n      end\n    end\n\n    context 'on cast' do\n      it 'accepts nil' do\n        expect(subject.cast(nil)).to be_nil\n      end\n\n      it 'accepts string' do\n        value = subject.cast('P1Y2M3DT4H5M6S')\n        expect(value).to be_a(ActiveSupport::Duration)\n        expect(value).to eq(reference)\n      end\n\n      it 'accepts duration' do\n        value = subject.cast(5.days)\n        expect(value).to be_a(ActiveSupport::Duration)\n        expect(value).to eql(value)\n      end\n\n      it 'accepts small seconds numeric' do\n        value = subject.cast(30)\n        expect(value).to be_a(ActiveSupport::Duration)\n        expect(value).to eq(30)\n      end\n\n      it 'accepts long seconds numeric' do\n        value = subject.cast(reference.to_i)\n        expect(value).to be_a(ActiveSupport::Duration)\n        expect(value).to eq(reference)\n      end\n\n      it 'accepts array with Y-M-D H:M:S format' do\n        value = subject.cast([1, 2, 3, 4, 5, 6])\n        expect(value).to be_a(ActiveSupport::Duration)\n        expect(value).to eq(reference)\n      end\n\n      it 'accepts array with empty values' do\n        value = subject.cast([nil, 0, 12, 30, 0])\n        sample = 12.hours + 30.minutes\n        expect(value).to be_a(ActiveSupport::Duration)\n        expect(value.inspect).to eq(sample.inspect)\n        expect(value).to eq(sample)\n      end\n\n      it 'accepts array with string' do\n        value = subject.cast(['45', '15'])\n        sample = 45.minutes + 15.seconds\n        expect(value).to be_a(ActiveSupport::Duration)\n        expect(value.inspect).to eq(sample.inspect)\n        expect(value).to eq(sample)\n      end\n\n      it 'accepts hash' do\n        value = subject.cast({years: 1, months: 2, days: 3, hours: 4, minutes: 5, seconds: 6})\n        expect(value).to be_a(ActiveSupport::Duration)\n        expect(value).to 
eq(reference)\n      end\n\n      it 'accepts hash with extra elements' do\n        value = subject.cast({extra: 1, hours: 12, minutes: 30})\n        sample = 12.hours + 30.minutes\n        expect(value).to be_a(ActiveSupport::Duration)\n        expect(value).to eq(sample)\n      end\n\n      it 'returns any other type of value as it is' do\n        value = subject.cast(true)\n        expect(value).to eql(true)\n      end\n    end\n  end\n\n  context 'on I18n' do\n    it 'transforms the value into singular text' do\n      expect(I18n.l 1.year).to eql('1 year')\n      expect(I18n.l 1.months).to eql('1 month')\n      expect(I18n.l 1.weeks).to eql('1 week')\n      expect(I18n.l 1.days).to eql('1 day')\n      expect(I18n.l 1.hours).to eql('1 hour')\n      expect(I18n.l 1.minutes).to eql('1 minute')\n      expect(I18n.l 1.seconds).to eql('1 second')\n    end\n\n    it 'transforms the value into plural text' do\n      expect(I18n.l 2.year).to eql('2 years')\n      expect(I18n.l 2.months).to eql('2 months')\n      expect(I18n.l 2.weeks).to eql('2 weeks')\n      expect(I18n.l 2.days).to eql('2 days')\n      expect(I18n.l 2.hours).to eql('2 hours')\n      expect(I18n.l 2.minutes).to eql('2 minutes')\n      expect(I18n.l 2.seconds).to eql('2 seconds')\n    end\n\n    it 'transforms multiple values' do\n      value = 1.year + 2.months + 3.days + 4.hours + 5.minutes + 6.seconds\n      expect(I18n.l value).to eql('1 year, 2 months, 3 days, 4 hours, 5 minutes, and 6 seconds')\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/lazy_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'Lazy', type: :helper do\n  subject { Torque::PostgreSQL::Attributes::Lazy }\n\n  it 'is consider nil' do\n    expect(subject.new(String, '')).to be_nil\n  end\n\n  it 'inspects as nil' do\n    expect(subject.new(String, '').inspect).to be_eql('nil')\n  end\n\n  it 'compares to nil only' do\n    expect(subject.new(String, '') == nil).to be_truthy\n    expect(subject.new(String, '') == '').to be_falsey\n    expect(subject.new(String, '') == 0).to be_falsey\n  end\n\n  it 'starts the object only on method call' do\n    expect(subject.new(String, '').to_s).to be_a(String)\n    expect(subject.new(String, '')).to respond_to(:chop)\n  end\nend\n"
  },
  {
    "path": "spec/tests/period_spec.rb",
    "content": "require 'spec_helper'\n\n# TODO: Convert to shared examples\nRSpec.describe 'Period' do\n  let(:model) { Class.new(TimeKeeper) }\n  let(:instance) { model.new }\n  let(:fields) { %i[available period tzperiod] }\n  let(:method_names) { Torque::PostgreSQL::config.period.method_names }\n  let(:attribute_klass) { Torque::PostgreSQL::Attributes::Period }\n\n  let(:true_value) { 'TRUE' }\n  let(:false_value) { 'FALSE' }\n\n  let(:klass_methods_range) { (0..22) }\n  let(:instance_methods_range) { (23..29) }\n\n  let(:klass_method_names) { method_names.to_a[klass_methods_range].to_h }\n  let(:instance_method_names) { method_names.to_a[instance_methods_range].to_h }\n\n  before { Time.zone = 'UTC' }\n\n  def decorate(model, field, options = {})\n    attribute_klass.include_on(model, :period_for)\n    model.period_for(field, **options)\n  end\n\n  context 'on config' do\n    let(:direct_method_names) do\n      list = method_names.dup\n      list.merge!(Torque::PostgreSQL::config.period.direct_method_names)\n      list.values.map { |v| v.gsub(/_?%s_?/, '') }\n    end\n\n    let(:other_method_names) do\n      method_names.transform_values.with_index { |_, idx| \"p__#{idx}\" }\n    end\n\n    it 'has definition method on the model' do\n      attribute_klass.include_on(ActiveRecord::Base, :period_for)\n      expect(model).to respond_to(:period_for)\n      ActiveRecord::Base.singleton_class.send(:undef_method, :period_for)\n    end\n\n    it 'create the methods with custom names' do\n      decorate(model, :tzperiod, threshold: 5.minutes, methods: other_method_names)\n\n      klass_method_names.size.times do |i|\n        expect(model).to respond_to(\"p__#{i}\")\n      end\n\n      initial = instance_methods_range.min\n      instance_method_names.size.times do |i|\n        expect(instance).to respond_to(\"p__#{initial + i}\")\n      end\n    end\n\n    it 'creates non prefixed methods if requested' do\n      decorate(model, :tzperiod, prefixed: false, threshold: 
5.minutes)\n\n      direct_method_names[klass_methods_range].each do |m|\n        expect(model).to respond_to(m)\n      end\n\n      direct_method_names[instance_methods_range].each do |m|\n        expect(instance).to respond_to(m)\n      end\n    end\n  end\n\n  context 'on tsrange' do\n    let(:type) { :tsrange }\n    let(:value) { Time.zone.now.beginning_of_minute }\n    let(:db_field) { '\"time_keepers\".\"period\"' }\n    let(:db_value) { \"'#{value.strftime('%F %T')}'\" }\n\n    let(:cast_type) { '::timestamp' }\n    let(:cast_db_value) { \"#{db_value}#{cast_type}\" }\n    let(:empty_condition) { \"#{type.to_s.upcase}(NULL, NULL)\" }\n    let(:nullif_condition) { \"NULLIF(#{db_field}, #{empty_condition})\" }\n\n    let(:date_type) { :daterange }\n    let(:lower_date) { \"LOWER(#{db_field})::date\" }\n    let(:upper_date) { \"UPPER(#{db_field})::date\" }\n    let(:date_db_field) { \"#{date_type.to_s.upcase}(#{lower_date}, #{upper_date}, '[]')\" }\n\n    context 'on model' do\n      before { decorate(model, :period) }\n\n      it 'queries current on period' do\n        expect(model.period_on(value).to_sql).to include(<<-SQL.squish)\n          COALESCE(#{nullif_condition} @> #{cast_db_value}, #{true_value})\n        SQL\n      end\n\n      it 'queries current period' do\n        expect(model.current_period.to_sql).to include(<<-SQL.squish)\n          COALESCE(#{nullif_condition} @>\n        SQL\n\n        expect(model.current_period.to_sql).to include(<<-SQL.squish)\n          #{cast_type}, #{true_value})\n        SQL\n      end\n\n      it 'queries not current period' do\n        expect(model.not_current_period.to_sql).to include(<<-SQL.squish)\n          NOT (COALESCE(#{nullif_condition} @>\n        SQL\n\n        expect(model.not_current_period.to_sql).to include(<<-SQL.squish)\n          #{cast_type}, #{true_value})\n        SQL\n      end\n\n      it 'queries containing period' do\n        expect(model.period_containing(:test).to_sql).to 
include(<<-SQL.squish)\n          #{db_field} @> \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.period_containing(value).to_sql).to include(<<-SQL.squish)\n          #{db_field} @> #{db_value}\n        SQL\n      end\n\n      it 'queries not containing period' do\n        expect(model.period_not_containing(:test).to_sql).to include(<<-SQL.squish)\n          NOT (#{db_field} @> \"time_keepers\".\"test\")\n        SQL\n\n        expect(model.period_not_containing(value).to_sql).to include(<<-SQL.squish)\n          NOT (#{db_field} @> #{db_value})\n        SQL\n      end\n\n      it 'queries overlapping period' do\n        expect(model.period_overlapping(:test).to_sql).to include(<<-SQL.squish)\n          #{db_field} && \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.period_overlapping(value, value).to_sql).to include(<<-SQL.squish)\n          #{db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value})\n        SQL\n      end\n\n      it 'queries not overlapping period' do\n        expect(model.period_not_overlapping(:test).to_sql).to include(<<-SQL.squish)\n          NOT (#{db_field} && \"time_keepers\".\"test\")\n        SQL\n\n        expect(model.period_not_overlapping(value, value).to_sql).to include(<<-SQL.squish)\n          NOT (#{db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value}))\n        SQL\n      end\n\n      it 'queries starting after period' do\n        expect(model.period_starting_after(:test).to_sql).to include(<<-SQL.squish)\n          LOWER(#{db_field}) > \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.period_starting_after(value).to_sql).to include(<<-SQL.squish)\n          LOWER(#{db_field}) > #{db_value}\n        SQL\n      end\n\n      it 'queries starting before period' do\n        expect(model.period_starting_before(:test).to_sql).to include(<<-SQL.squish)\n          LOWER(#{db_field}) < \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.period_starting_before(value).to_sql).to 
include(<<-SQL.squish)\n          LOWER(#{db_field}) < #{db_value}\n        SQL\n      end\n\n      it 'queries finishing after period' do\n        expect(model.period_finishing_after(:test).to_sql).to include(<<-SQL.squish)\n          UPPER(#{db_field}) > \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.period_finishing_after(value).to_sql).to include(<<-SQL.squish)\n          UPPER(#{db_field}) > #{db_value}\n        SQL\n      end\n\n      it 'queries finishing before period' do\n        expect(model.period_finishing_before(:test).to_sql).to include(<<-SQL.squish)\n          UPPER(#{db_field}) < \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.period_finishing_before(value).to_sql).to include(<<-SQL.squish)\n          UPPER(#{db_field}) < #{db_value}\n        SQL\n      end\n\n      it 'does not have real starting after for period' do\n        expect(model.all).not_to respond_to(:real_starting_after)\n      end\n\n      it 'does not have real starting before for period' do\n        expect(model.all).not_to respond_to(:real_starting_before)\n      end\n\n      it 'does not have real finishing after for period' do\n        expect(model.all).not_to respond_to(:real_finishing_after)\n      end\n\n      it 'does not have real finishing before for period' do\n        expect(model.all).not_to respond_to(:real_finishing_before)\n      end\n\n      it 'queries containing date period' do\n        expect(model.period_containing_date(:test).to_sql).to include(<<-SQL.squish)\n          #{date_db_field} @> \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.period_containing_date(value).to_sql).to include(<<-SQL.squish)\n          #{date_db_field} @> #{db_value}::date\n        SQL\n      end\n\n      it 'queries not containing date period' do\n        expect(model.period_not_containing_date(:test).to_sql).to include(<<-SQL.squish)\n          NOT (#{date_db_field} @> \"time_keepers\".\"test\")\n        SQL\n\n        
expect(model.period_not_containing_date(value).to_sql).to include(<<-SQL.squish)\n          NOT (#{date_db_field} @> #{db_value}::date)\n        SQL\n      end\n\n      it 'queries overlapping date period' do\n        expect(model.period_overlapping_date(:test).to_sql).to include(<<-SQL.squish)\n          #{date_db_field} && \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.period_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)\n          #{date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date)\n        SQL\n      end\n\n      it 'queries not overlapping date period' do\n        expect(model.period_not_overlapping_date(:test).to_sql).to include(<<-SQL.squish)\n          NOT (#{date_db_field} && \"time_keepers\".\"test\")\n        SQL\n\n        expect(model.period_not_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)\n          NOT (#{date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date))\n        SQL\n      end\n\n      it 'does not have real containing date period' do\n        expect(model.all).not_to respond_to(:period_real_containing_date)\n      end\n\n      it 'does not have real overlapping date period' do\n        expect(model.all).not_to respond_to(:period_real_overlapping_date)\n      end\n    end\n\n    context 'on instance' do\n      before { decorate(model, :period) }\n\n      it 'checks for current value' do\n        instance.period = 1.hour.ago.utc..1.hour.from_now.utc\n        expect(instance).to be_current_period\n\n        instance.period = 4.hour.from_now.utc..6.hour.from_now.utc\n        expect(instance).not_to be_current_period\n\n        instance.period = [nil, 4.hours.ago.utc]\n        expect(instance).not_to be_current_period\n\n        instance.period = [4.hours.from_now.utc, nil]\n        expect(instance).not_to be_current_period\n\n        instance.period = [nil, nil]\n        expect(instance).to be_current_period\n      end\n\n      it 
'checks fro current based on a value' do\n        instance.period = 1.hour.ago.utc..1.hour.from_now.utc\n        expect(instance).to be_current_period_on(5.minutes.from_now.utc)\n\n        instance.period = 4.hour.from_now.utc..6.hour.from_now.utc\n        expect(instance).not_to be_current_period_on(5.minutes.from_now.utc)\n      end\n\n      it 'returns the start time' do\n        instance.period = 1.hour.ago.utc..1.hour.from_now.utc\n        expect(instance.period_start).to be_eql(instance.period.min)\n\n        instance.period = 4.hour.from_now.utc..6.hour.from_now.utc\n        expect(instance.period_start).to be_eql(instance.period.min)\n      end\n\n      it 'returns the finish time' do\n        instance.period = 1.hour.ago.utc..1.hour.from_now.utc\n        expect(instance.period_finish).to be_eql(instance.period.max)\n\n        instance.period = 4.hour.from_now.utc..6.hour.from_now.utc\n        expect(instance.period_finish).to be_eql(instance.period.max)\n      end\n    end\n\n    context 'with field threshold' do\n      before { decorate(model, :period, threshold: :th) }\n\n      let(:lower_db_field) { \"(LOWER(#{db_field}) - #{threshold_value})\" }\n      let(:upper_db_field) { \"(UPPER(#{db_field}) + #{threshold_value})\" }\n      let(:threshold_value) { '\"time_keepers\".\"th\"' }\n      let(:threshold_db_field) { \"#{type.to_s.upcase}(#{lower_db_field}, #{upper_db_field})\" }\n      let(:nullif_condition) { \"NULLIF(#{threshold_db_field}, #{empty_condition})\" }\n      let(:threshold_date_db_field) do\n        \"DATERANGE(#{lower_db_field}::date, #{upper_db_field}::date, '[]')\"\n      end\n\n      context 'on model' do\n        it 'queries current on period' do\n          expect(model.period_on(value).to_sql).to include(<<-SQL.squish)\n            COALESCE(#{nullif_condition} @> #{cast_db_value}, #{true_value})\n          SQL\n        end\n\n        it 'queries current period' do\n          expect(model.current_period.to_sql).to 
include(<<-SQL.squish)\n            COALESCE(#{nullif_condition} @>\n          SQL\n\n          expect(model.current_period.to_sql).to include(<<-SQL.squish)\n            #{cast_type}, #{true_value})\n          SQL\n        end\n\n        it 'queries not current period' do\n          expect(model.not_current_period.to_sql).to include(<<-SQL.squish)\n            NOT (COALESCE(#{nullif_condition} @>\n          SQL\n\n          expect(model.not_current_period.to_sql).to include(<<-SQL.squish)\n            #{cast_type}, #{true_value})\n          SQL\n        end\n\n        it 'queries real containing period' do\n          expect(model.period_real_containing(:test).to_sql).to include(<<-SQL.squish)\n            #{threshold_db_field} @> \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_containing(value).to_sql).to include(<<-SQL.squish)\n            #{threshold_db_field} @> #{db_value}\n          SQL\n        end\n\n        it 'queries real overlapping period' do\n          expect(model.period_real_overlapping(:test).to_sql).to include(<<-SQL.squish)\n            #{threshold_db_field} && \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_overlapping(value, value).to_sql).to include(<<-SQL.squish)\n            #{threshold_db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value})\n          SQL\n        end\n\n        it 'queries real starting after for period' do\n          expect(model.period_real_starting_after(:test).to_sql).to include(<<-SQL.squish)\n            #{lower_db_field} > \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_starting_after(value).to_sql).to include(<<-SQL.squish)\n            #{lower_db_field} > #{db_value}\n          SQL\n        end\n\n        it 'queries real starting before for period' do\n          expect(model.period_real_starting_before(:test).to_sql).to include(<<-SQL.squish)\n            #{lower_db_field} < \"time_keepers\".\"test\"\n          SQL\n\n 
         expect(model.period_real_starting_before(value).to_sql).to include(<<-SQL.squish)\n            #{lower_db_field} < #{db_value}\n          SQL\n        end\n\n        it 'queries real finishing after for period' do\n          expect(model.period_real_finishing_after(:test).to_sql).to include(<<-SQL.squish)\n            #{upper_db_field} > \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_finishing_after(value).to_sql).to include(<<-SQL.squish)\n            #{upper_db_field} > #{db_value}\n          SQL\n        end\n\n        it 'queries real finishing before for period' do\n          expect(model.period_real_finishing_before(:test).to_sql).to include(<<-SQL.squish)\n            #{upper_db_field} < \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_finishing_before(value).to_sql).to include(<<-SQL.squish)\n            #{upper_db_field} < #{db_value}\n          SQL\n        end\n\n        it 'queries containing date period' do\n          expect(model.period_containing_date(:test).to_sql).to include(<<-SQL.squish)\n            #{date_db_field} @> \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_containing_date(value).to_sql).to include(<<-SQL.squish)\n            #{date_db_field} @> #{db_value}::date\n          SQL\n        end\n\n        it 'queries not containing date period' do\n          expect(model.period_not_containing_date(:test).to_sql).to include(<<-SQL.squish)\n            NOT (#{date_db_field} @> \"time_keepers\".\"test\")\n          SQL\n\n          expect(model.period_not_containing_date(value).to_sql).to include(<<-SQL.squish)\n            NOT (#{date_db_field} @> #{db_value}::date)\n          SQL\n        end\n\n        it 'queries overlapping date period' do\n          expect(model.period_overlapping_date(:test).to_sql).to include(<<-SQL.squish)\n            #{date_db_field} && \"time_keepers\".\"test\"\n          SQL\n\n          
expect(model.period_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)\n            #{date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date)\n          SQL\n        end\n\n        it 'queries not overlapping date period' do\n          expect(model.period_not_overlapping_date(:test).to_sql).to include(<<-SQL.squish)\n            NOT (#{date_db_field} && \"time_keepers\".\"test\")\n          SQL\n\n          expect(model.period_not_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)\n            NOT (#{date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date))\n          SQL\n        end\n\n        it 'queries real containing date period' do\n          expect(model.period_real_containing_date(:test).to_sql).to include(<<-SQL.squish)\n            #{threshold_date_db_field} @> \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_containing_date(value).to_sql).to include(<<-SQL.squish)\n            #{threshold_date_db_field} @> #{db_value}::date\n          SQL\n        end\n\n        it 'queries real overlapping date period' do\n          expect(model.period_real_overlapping_date(:test).to_sql).to include(<<-SQL.squish)\n            #{threshold_date_db_field} && \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)\n            #{threshold_date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date)\n          SQL\n        end\n      end\n\n      context 'on instance' do\n        before { decorate(model, :period, threshold: :th) }\n        before { instance.th = 1.hour }\n\n        it 'checks for current value' do\n          instance.period = nil\n          expect(instance).to be_current_period\n\n          instance.period = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)\n          expect(instance).to be_current_period\n\n          instance.period = 
(Time.zone.now + 90.minutes)..(Time.zone.now + 3.hour)\n          expect(instance).not_to be_current_period\n        end\n\n        it 'checks for current based on a value' do\n          instance.period = nil\n          expect(instance).to be_current_period_on(5.minutes.from_now.utc)\n\n          instance.period = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)\n          expect(instance).to be_current_period_on(5.minutes.from_now.utc)\n\n          instance.period = 90.minutes.from_now.utc..3.hour.from_now.utc\n          expect(instance).not_to be_current_period_on(5.minutes.from_now.utc)\n        end\n\n        it 'returns the real range' do\n          value = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)\n          instance.period = value\n          expect(instance.real_period.min).to be_eql(value.min - 1.hour)\n          expect(instance.real_period.max).to be_eql(value.max + 1.hour)\n        end\n\n        it 'returns the real start' do\n          value = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)\n          instance.period = value\n          expect(instance.period_real_start).to be_eql(value.min - 1.hour)\n        end\n\n        it 'returns the real finish' do\n          value = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)\n          instance.period = value\n          expect(instance.period_real_finish).to be_eql(value.max + 1.hour)\n        end\n      end\n    end\n\n    context 'with value threshold' do\n      before { decorate(model, :period, threshold: 5.minutes) }\n\n      let(:lower_db_field) { \"(LOWER(#{db_field}) - #{threshold_value})\" }\n      let(:upper_db_field) { \"(UPPER(#{db_field}) + #{threshold_value})\" }\n      let(:threshold_value) { \"'300 seconds'::interval\" }\n      let(:threshold_db_field) { \"#{type.to_s.upcase}(#{lower_db_field}, #{upper_db_field})\" }\n      let(:nullif_condition) { \"NULLIF(#{threshold_db_field}, #{empty_condition})\" }\n\n      context 'on model' do\n        it 'queries current on period' 
do\n          expect(model.period_on(value).to_sql).to include(<<-SQL.squish)\n            COALESCE(#{nullif_condition} @> #{cast_db_value}, #{true_value})\n          SQL\n        end\n\n        it 'queries current period' do\n          expect(model.current_period.to_sql).to include(<<-SQL.squish)\n            COALESCE(#{nullif_condition} @>\n          SQL\n\n          expect(model.current_period.to_sql).to include(<<-SQL.squish)\n            #{cast_type}, #{true_value})\n          SQL\n        end\n\n        it 'queries not current period' do\n          expect(model.not_current_period.to_sql).to include(<<-SQL.squish)\n            NOT (COALESCE(#{nullif_condition} @>\n          SQL\n\n          expect(model.not_current_period.to_sql).to include(<<-SQL.squish)\n            #{cast_type}, #{true_value})\n          SQL\n        end\n\n        it 'queries real containing period' do\n          expect(model.period_real_containing(:test).to_sql).to include(<<-SQL.squish)\n            #{threshold_db_field} @> \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_containing(value).to_sql).to include(<<-SQL.squish)\n            #{threshold_db_field} @> #{db_value}\n          SQL\n        end\n\n        it 'queries real overlapping period' do\n          expect(model.period_real_overlapping(:test).to_sql).to include(<<-SQL.squish)\n            #{threshold_db_field} && \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_overlapping(value, value).to_sql).to include(<<-SQL.squish)\n            #{threshold_db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value})\n          SQL\n        end\n\n        it 'queries real starting after for period' do\n          expect(model.period_real_starting_after(:test).to_sql).to include(<<-SQL.squish)\n            #{lower_db_field} > \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_starting_after(value).to_sql).to include(<<-SQL.squish)\n            
#{lower_db_field} > #{db_value}\n          SQL\n        end\n\n        it 'queries real starting before for period' do\n          expect(model.period_real_starting_before(:test).to_sql).to include(<<-SQL.squish)\n            #{lower_db_field} < \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_starting_before(value).to_sql).to include(<<-SQL.squish)\n            #{lower_db_field} < #{db_value}\n          SQL\n        end\n\n        it 'queries real finishing after for period' do\n          expect(model.period_real_finishing_after(:test).to_sql).to include(<<-SQL.squish)\n            #{upper_db_field} > \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_finishing_after(value).to_sql).to include(<<-SQL.squish)\n            #{upper_db_field} > #{db_value}\n          SQL\n        end\n\n        it 'queries real finishing before for period' do\n          expect(model.period_real_finishing_before(:test).to_sql).to include(<<-SQL.squish)\n            #{upper_db_field} < \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_real_finishing_before(value).to_sql).to include(<<-SQL.squish)\n            #{upper_db_field} < #{db_value}\n          SQL\n        end\n\n        it 'queries containing date period' do\n          expect(model.period_containing_date(:test).to_sql).to include(<<-SQL.squish)\n            #{date_db_field} @> \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_containing_date(value).to_sql).to include(<<-SQL.squish)\n            #{date_db_field} @> #{db_value}\n          SQL\n        end\n\n        it 'queries not containing date period' do\n          expect(model.period_not_containing_date(:test).to_sql).to include(<<-SQL.squish)\n            NOT (#{date_db_field} @> \"time_keepers\".\"test\")\n          SQL\n\n          expect(model.period_not_containing_date(value).to_sql).to include(<<-SQL.squish)\n            NOT (#{date_db_field} @> #{db_value}::date)\n  
        SQL\n        end\n\n        it 'queries overlapping date period' do\n          expect(model.period_overlapping_date(:test).to_sql).to include(<<-SQL.squish)\n            #{date_db_field} && \"time_keepers\".\"test\"\n          SQL\n\n          expect(model.period_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)\n            #{date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date)\n          SQL\n        end\n\n        it 'queries not overlapping date period' do\n          expect(model.period_not_overlapping_date(:test).to_sql).to include(<<-SQL.squish)\n            NOT (#{date_db_field} && \"time_keepers\".\"test\")\n          SQL\n\n          expect(model.period_not_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)\n            NOT (#{date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date))\n          SQL\n        end\n      end\n\n      context 'on instance' do\n        before { decorate(model, :period, threshold: 45.minutes) }\n\n        it 'checks for current value' do\n          instance.period = nil\n          expect(instance).to be_current_period\n\n          instance.period = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)\n          expect(instance).to be_current_period\n\n          instance.period = (Time.zone.now + 90.minutes)..(Time.zone.now + 3.hour)\n          expect(instance).not_to be_current_period\n        end\n\n        it 'checks for current based on a value' do\n          instance.period = nil\n          expect(instance).to be_current_period_on(5.minutes.from_now.utc)\n\n          instance.period = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)\n          expect(instance).to be_current_period_on(5.minutes.from_now.utc)\n\n          instance.period = 90.minutes.from_now.utc..3.hour.from_now.utc\n          expect(instance).not_to be_current_period_on(5.minutes.from_now.utc)\n        end\n\n        it 'returns the real range' do\n          value = 
(Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)\n          instance.period = value\n          expect(instance.real_period.min).to be_eql(value.min - 45.minutes)\n          expect(instance.real_period.max).to be_eql(value.max + 45.minutes)\n        end\n\n        it 'returns the real start' do\n          value = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)\n          instance.period = value\n          expect(instance.period_real_start).to be_eql(value.min - 45.minutes)\n        end\n\n        it 'returns the real finish' do\n          value = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)\n          instance.period = value\n          expect(instance.period_real_finish).to be_eql(value.max + 45.minutes)\n        end\n      end\n    end\n  end\n\n  context 'on daterange' do\n    let(:type) { :daterange }\n    let(:value) { Date.today }\n    let(:db_field) { '\"time_keepers\".\"available\"' }\n    let(:db_value) { \"'#{value.strftime('%F')}'\" }\n\n    let(:cast_type) { '::date' }\n    let(:cast_db_value) { \"#{db_value}#{cast_type}\" }\n    let(:empty_condition) { \"#{type.to_s.upcase}(NULL, NULL)\" }\n    let(:nullif_condition) { \"NULLIF(#{threshold_db_field}, #{empty_condition})\" }\n\n    let(:lower_db_field) { \"(LOWER(#{db_field}) - #{threshold_value})::date\" }\n    let(:upper_db_field) { \"(UPPER(#{db_field}) + #{threshold_value})::date\" }\n    let(:threshold_value) { \"'86400 seconds'::interval\" }\n    let(:threshold_db_field) { \"#{type.to_s.upcase}(#{lower_db_field}, #{upper_db_field})\" }\n\n    before { decorate(model, :available, pessimistic: true, threshold: 1.day) }\n\n    context 'on model' do\n      it 'queries current on available' do\n        expect(model.available_on(value).to_sql).to include(<<-SQL.squish)\n          COALESCE(#{nullif_condition} @> #{cast_db_value}, #{false_value})\n        SQL\n      end\n\n      it 'queries current available' do\n        expect(model.current_available.to_sql).to include(<<-SQL.squish)\n          
COALESCE(#{nullif_condition} @>\n        SQL\n\n        expect(model.current_available.to_sql).to include(<<-SQL.squish)\n          #{cast_type}, #{false_value})\n        SQL\n      end\n\n      it 'queries not current available' do\n        expect(model.not_current_available.to_sql).to include(<<-SQL.squish)\n          NOT (COALESCE(#{nullif_condition} @>\n        SQL\n\n        expect(model.not_current_available.to_sql).to include(<<-SQL.squish)\n          #{cast_type}, #{false_value})\n        SQL\n      end\n\n      it 'queries containing available' do\n        expect(model.available_containing(:test).to_sql).to include(<<-SQL.squish)\n          #{db_field} @> \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.available_containing(value).to_sql).to include(<<-SQL.squish)\n          #{db_field} @> #{db_value}\n        SQL\n      end\n\n      it 'queries not containing available' do\n        expect(model.available_not_containing(:test).to_sql).to include(<<-SQL.squish)\n          NOT (#{db_field} @> \"time_keepers\".\"test\")\n        SQL\n\n        expect(model.available_not_containing(value).to_sql).to include(<<-SQL.squish)\n          NOT (#{db_field} @> #{db_value})\n        SQL\n      end\n\n      it 'queries overlapping available' do\n        expect(model.available_overlapping(:test).to_sql).to include(<<-SQL.squish)\n          #{db_field} && \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.available_overlapping(value, value).to_sql).to include(<<-SQL.squish)\n          #{db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value})\n        SQL\n      end\n\n      it 'queries not overlapping available' do\n        expect(model.available_not_overlapping(:test).to_sql).to include(<<-SQL.squish)\n          NOT (#{db_field} && \"time_keepers\".\"test\")\n        SQL\n\n        expect(model.available_not_overlapping(value, value).to_sql).to include(<<-SQL.squish)\n          NOT (#{db_field} && #{type.to_s.upcase}(#{db_value}, 
#{db_value}))\n        SQL\n      end\n\n      it 'queries starting after available' do\n        expect(model.available_starting_after(:test).to_sql).to include(<<-SQL.squish)\n          LOWER(#{db_field}) > \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.available_starting_after(value).to_sql).to include(<<-SQL.squish)\n          LOWER(#{db_field}) > #{db_value}\n        SQL\n      end\n\n      it 'queries starting before available' do\n        expect(model.available_starting_before(:test).to_sql).to include(<<-SQL.squish)\n          LOWER(#{db_field}) < \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.available_starting_before(value).to_sql).to include(<<-SQL.squish)\n          LOWER(#{db_field}) < #{db_value}\n        SQL\n      end\n\n      it 'queries finishing after available' do\n        expect(model.available_finishing_after(:test).to_sql).to include(<<-SQL.squish)\n          UPPER(#{db_field}) > \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.available_finishing_after(value).to_sql).to include(<<-SQL.squish)\n          UPPER(#{db_field}) > #{db_value}\n        SQL\n      end\n\n      it 'queries finishing before available' do\n        expect(model.available_finishing_before(:test).to_sql).to include(<<-SQL.squish)\n          UPPER(#{db_field}) < \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.available_finishing_before(value).to_sql).to include(<<-SQL.squish)\n          UPPER(#{db_field}) < #{db_value}\n        SQL\n      end\n\n      it 'queries real containing available' do\n        expect(model.available_real_containing(:test).to_sql).to include(<<-SQL.squish)\n          #{threshold_db_field} @> \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.available_real_containing(value).to_sql).to include(<<-SQL.squish)\n          #{threshold_db_field} @> #{db_value}\n        SQL\n      end\n\n      it 'queries real overlapping available' do\n        
expect(model.available_real_overlapping(:test).to_sql).to include(<<-SQL.squish)\n          #{threshold_db_field} && \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.available_real_overlapping(value, value).to_sql).to include(<<-SQL.squish)\n          #{threshold_db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value})\n        SQL\n      end\n\n      it 'queries real starting after for available' do\n        expect(model.available_real_starting_after(:test).to_sql).to include(<<-SQL.squish)\n          #{lower_db_field} > \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.available_real_starting_after(value).to_sql).to include(<<-SQL.squish)\n          #{lower_db_field} > #{db_value}\n        SQL\n      end\n\n      it 'queries real starting before for available' do\n        expect(model.available_real_starting_before(:test).to_sql).to include(<<-SQL.squish)\n          #{lower_db_field} < \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.available_real_starting_before(value).to_sql).to include(<<-SQL.squish)\n          #{lower_db_field} < #{db_value}\n        SQL\n      end\n\n      it 'queries real finishing after for available' do\n        expect(model.available_real_finishing_after(:test).to_sql).to include(<<-SQL.squish)\n          #{upper_db_field} > \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.available_real_finishing_after(value).to_sql).to include(<<-SQL.squish)\n          #{upper_db_field} > #{db_value}\n        SQL\n      end\n\n      it 'queries real finishing before for available' do\n        expect(model.available_real_finishing_before(:test).to_sql).to include(<<-SQL.squish)\n          #{upper_db_field} < \"time_keepers\".\"test\"\n        SQL\n\n        expect(model.available_real_finishing_before(value).to_sql).to include(<<-SQL.squish)\n          #{upper_db_field} < #{db_value}\n        SQL\n      end\n\n      it 'does not query containing date available' do\n        
expect(model.all).not_to respond_to(:available_containing_date)\n      end\n\n      it 'does not query not containing date available' do\n        expect(model.all).not_to respond_to(:available_not_containing_date)\n      end\n\n      it 'does not query overlapping date available' do\n        expect(model.all).not_to respond_to(:available_overlapping_date)\n      end\n\n      it 'does not query not overlapping date available' do\n        expect(model.all).not_to respond_to(:available_not_overlapping_date)\n      end\n\n      it 'does not query real containing date available' do\n        expect(model.all).not_to respond_to(:available_real_containing_date)\n      end\n\n      it 'does not query real overlapping date available' do\n        expect(model.all).not_to respond_to(:available_real_overlapping_date)\n      end\n    end\n\n    context 'on instance' do\n      it 'checks for current value' do\n        instance.available = nil\n        expect(instance).not_to be_current_available\n\n        instance.available = Date.yesterday..Date.tomorrow\n        expect(instance).to be_current_available\n\n        instance.available = Date.new.prev_month..Date.new.next_month\n        expect(instance).not_to be_current_available\n      end\n\n      it 'checks for current based on a value' do\n        instance.available = nil\n        expect(instance).not_to be_current_available_on(Date.tomorrow)\n\n        instance.available = Date.yesterday..Date.tomorrow\n        expect(instance).to be_current_available_on(Date.tomorrow)\n\n        instance.available = Date.new.prev_month..Date.new.next_month\n        expect(instance).to be_current_available_on(Date.new.next_month)\n      end\n\n      it 'returns the start date' do\n        instance.available = Date.yesterday..Date.tomorrow\n        expect(instance.available_start).to be_eql(instance.available.min)\n\n        instance.available = Date.new.prev_month..Date.new.next_month\n        expect(instance.available_start).to 
be_eql(instance.available.min)\n      end\n\n      it 'returns the finish date' do\n        instance.available = Date.yesterday..Date.tomorrow\n        expect(instance.available_finish).to be_eql(instance.available.max)\n\n        instance.available = Date.new.prev_month..Date.new.next_month\n        expect(instance.available_finish).to be_eql(instance.available.max)\n      end\n\n      it 'returns the real range' do\n        value = Date.yesterday..Date.tomorrow\n        instance.available = value\n        expect(instance.real_available.min).to be_eql(value.min.prev_day)\n        expect(instance.real_available.max).to be_eql(value.max.next_day)\n      end\n\n      it 'returns the real start date' do\n        instance.available = Date.yesterday..Date.tomorrow\n        expect(instance.available_real_start).to be_eql(instance.available.min.prev_day)\n      end\n\n      it 'returns the real finish date' do\n        instance.available = Date.yesterday..Date.tomorrow\n        expect(instance.available_real_finish).to be_eql(instance.available.max.next_day)\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/predicate_builder_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'PredicateBuilder' do\n  describe 'on enumerator lazy' do\n    let(:timed_out_error) do\n      Torque::PostgreSQL::PredicateBuilder::EnumeratorLazyHandler::Timeout\n    end\n\n    subject { Video.all }\n\n    after do\n      Torque::PostgreSQL.config.predicate_builder.lazy_timeout = 0.02\n      Torque::PostgreSQL.config.predicate_builder.lazy_limit = 2_000\n    end\n\n    it 'works with provided value' do\n      sql = subject.where(id: [1,2,3].lazy).to_sql\n      expect(sql).to include(\"WHERE \\\"videos\\\".\\\"id\\\" IN (1, 2, 3)\")\n    end\n\n    it 'handles gracefully a timeout' do\n      Torque::PostgreSQL.config.predicate_builder.lazy_timeout = 0.01\n      Torque::PostgreSQL.config.predicate_builder.lazy_limit = nil\n      expect { subject.where(id: (1..).lazy).to_sql }.to raise_error(timed_out_error)\n    end\n\n    it 'handles properly a limit' do\n      Torque::PostgreSQL.config.predicate_builder.lazy_timeout = nil\n      Torque::PostgreSQL.config.predicate_builder.lazy_limit = 2\n\n      sql = subject.where(id: [1,2,3].lazy).to_sql\n      expect(sql).to include(\"WHERE \\\"videos\\\".\\\"id\\\" IN (1, 2)\")\n    end\n  end\n\n  describe 'on arel attribute' do\n    subject { Item.all }\n\n    it 'works with both plain attributes' do\n      sql = subject.where(id: Item.arel_table[:id]).to_sql\n      expect(sql).to include(\"WHERE \\\"items\\\".\\\"id\\\" = \\\"items\\\".\\\"id\\\"\")\n    end\n\n    it 'works when when the left side is an array' do\n      sql = subject.where(tag_ids: Item.arel_table[:id]).to_sql\n      expect(sql).to include(\"WHERE \\\"items\\\".\\\"id\\\" = ANY(\\\"items\\\".\\\"tag_ids\\\")\")\n    end\n\n    it 'works when the right side is an array' do\n      sql = subject.where(id: Item.arel_table[:tag_ids]).to_sql\n      expect(sql).to include(\"WHERE \\\"items\\\".\\\"id\\\" = ANY(\\\"items\\\".\\\"tag_ids\\\")\")\n    end\n\n    it 'works when both are arrays' do\n      sql = 
subject.where(tag_ids: Item.arel_table[:tag_ids]).to_sql\n      expect(sql).to include(\"WHERE \\\"items\\\".\\\"tag_ids\\\" && \\\"items\\\".\\\"tag_ids\\\"\")\n    end\n  end\n\n  describe 'on array' do\n    subject { Item.all }\n\n    before { Torque::PostgreSQL.config.predicate_builder.handle_array_attributes = true }\n    after { Torque::PostgreSQL.config.predicate_builder.handle_array_attributes = false }\n\n    it 'works with plain array when disabled' do\n      Torque::PostgreSQL.config.predicate_builder.handle_array_attributes = false\n\n      sql = subject.where(tag_ids: 1).to_sql\n      expect(sql).to include(\"WHERE \\\"items\\\".\\\"tag_ids\\\" = 1\")\n\n      sql = subject.where(tag_ids: [1, 2, 3]).to_sql\n      expect(sql).to include(\"WHERE \\\"items\\\".\\\"tag_ids\\\" = '{1,2,3}'\")\n    end\n\n    it 'works with a single value' do\n      sql = subject.where(tag_ids: 1).to_sql\n      expect(sql).to include(\"WHERE 1 = ANY(\\\"items\\\".\\\"tag_ids\\\")\")\n    end\n\n    it 'works with an array value' do\n      sql = subject.where(tag_ids: [1, 2, 3]).to_sql\n      expect(sql).to include(\"WHERE \\\"items\\\".\\\"tag_ids\\\" && '{1,2,3}'\")\n    end\n\n    it 'works with an empty array' do\n      sql = subject.where(tag_ids: []).to_sql\n      expect(sql).to include(\"WHERE CARDINALITY(\\\"items\\\".\\\"tag_ids\\\") = 0\")\n    end\n\n    it 'properly binds the provided values' do\n      sql, binds = get_query_with_binds { subject.where(tag_ids: 1).load }\n      expect(sql).to include(\"WHERE $1 = ANY(\\\"items\\\".\\\"tag_ids\\\")\")\n      expect(binds.first.value).to eq(1)\n\n      sql, binds = get_query_with_binds { subject.where(tag_ids: [1, 2, 3]).load }\n      expect(sql).to include(\"WHERE \\\"items\\\".\\\"tag_ids\\\" && $1\")\n      expect(binds.first.value).to eq([1, 2, 3])\n\n      sql, binds = get_query_with_binds { subject.where(tag_ids: []).load }\n      expect(sql).to include(\"WHERE CARDINALITY(\\\"items\\\".\\\"tag_ids\\\") = 
0\")\n      expect(binds).to be_empty\n    end\n  end\n\n  describe 'on regexp' do\n    subject { Video.all }\n\n    it 'works with a basic regular expression' do\n      sql = subject.where(title: /(a|b)/).to_sql\n      expect(sql).to include(\"WHERE \\\"videos\\\".\\\"title\\\" ~ '(a|b)'\")\n    end\n\n    it 'works with a case-insensitive regular expression' do\n      sql = subject.where(title: /(a|b)/i).to_sql\n      expect(sql).to include(\"WHERE \\\"videos\\\".\\\"title\\\" ~* '(a|b)'\")\n    end\n\n    it 'works with characters that need escape' do\n      sql = subject.where(title: %r{a|'|\"|\\\\}).to_sql\n      expect(sql).to include(\"WHERE \\\"videos\\\".\\\"title\\\" ~ 'a|''|\\\"|\\\\\\\\'\")\n    end\n\n    it 'properly binds the provided value' do\n      query = subject.where(title: /(a|b)/)\n\n      sql, binds = get_query_with_binds { query.load }\n      expect(sql).to include(\"WHERE \\\"videos\\\".\\\"title\\\" ~ $1\")\n      expect(binds.first.value).to eq('(a|b)')\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/quoting_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'Quoting', type: :helper do\n  let(:connection) { ActiveRecord::Base.connection }\n\n  context 'on type names' do\n    it 'accepts type name only' do\n      expect(connection.quote_type_name('sample')).to eql('\"public\".\"sample\"')\n    end\n\n    it 'accepts schema and type name' do\n      expect(connection.quote_type_name('other.sample')).to eql('\"other\".\"sample\"')\n    end\n\n    it 'accepts schema as a parameter' do\n      expect(connection.quote_type_name('sample', 'test')).to eql('\"test\".\"sample\"')\n    end\n\n    it 'always prefer the schema from parameter' do\n      expect(connection.quote_type_name('nothis.sample', 'this')).to eql('\"this\".\"sample\"')\n    end\n  end\n\nend\n"
  },
  {
    "path": "spec/tests/relation_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec::Matchers.define :be_attributes_as do |list|\n  match do |other|\n    other.each_with_index.map do |item, idx|\n      item.relation.name == list[idx][0] && item.name.to_s == list[idx][1]\n    end.all?\n  end\nend\n\nRSpec.describe 'Relation', type: :helper do\n\n  context 'on resolving columns' do\n    subject { Post.unscoped.method(:resolve_column) }\n\n    def attribute(relation, name)\n      result = Arel::Attributes::Attribute.new\n      result.relation = relation\n      result.name = name\n      result\n    end\n\n    it 'asserts sql literals' do\n      check = ['name', 'other.title']\n      expect(subject.call(check)).to eql(check)\n    end\n\n    it 'asserts attribute symbols' do\n      check = [:title, :content]\n      result = [['posts', 'title'], ['posts', 'content']]\n      expect(subject.call(check)).to be_attributes_as(result)\n    end\n\n    it 'asserts direct hash relations' do\n      check = [:title, author: :name]\n      result = [['posts', 'title'], ['authors', 'name']]\n      expect(subject.call(check)).to be_attributes_as(result)\n    end\n\n    it 'asserts multiple values on hash definition' do\n      check = [author: [:name, :age]]\n      result = [['authors', 'name'], ['authors', 'age']]\n      expect(subject.call(check)).to be_attributes_as(result)\n    end\n\n    it 'raises on relation not present' do\n      check = [supervisors: :name]\n      expect{ subject.call(check) }.to raise_error(ArgumentError, /Relation for/)\n    end\n\n    it 'raises on third level access' do\n      check = [author: [comments: :body]]\n      expect{ subject.call(check) }.to raise_error(ArgumentError, /on third level/)\n    end\n  end\n\n  context 'on joining series' do\n    let(:source) { Video.all }\n\n    it 'works' do\n      list = create_list(:video, 5)[1..4]\n      range = list.first.id..list.last.id\n      expect(source.join_series(range, with: :id).to_a).to eq(list)\n      expect(source.join_series(range, 
with: :id, step: 3).to_a).to eq([list.first, list.last])\n    end\n\n    it 'produces the right SQL' do\n      sql = 'SELECT \"videos\".* FROM \"videos\"'\n      sql += ' INNER JOIN GENERATE_SERIES(1::integer, 10::integer)'\n      sql += ' AS series ON \"series\" = \"videos\".\"id\"'\n      expect(source.join_series(1..10, with: :id).to_sql).to eq(sql)\n    end\n\n    it 'can be renamed' do\n      sql = 'SELECT \"videos\".* FROM \"videos\"'\n      sql += ' INNER JOIN GENERATE_SERIES(1::integer, 10::integer)'\n      sql += ' AS seq ON \"seq\" = \"videos\".\"id\"'\n      expect(source.join_series(1..10, with: :id, as: :seq).to_sql).to eq(sql)\n    end\n\n    it 'can contain the step' do\n      sql = 'SELECT \"videos\".* FROM \"videos\"'\n      sql += ' INNER JOIN GENERATE_SERIES(1::integer, 10::integer, 2::integer)'\n      sql += ' AS series ON \"series\" = \"videos\".\"id\"'\n      expect(source.join_series(1..10, with: :id, step: 2).to_sql).to eq(sql)\n    end\n\n    it 'works with float values' do\n      sql = 'SELECT \"videos\".* FROM \"videos\"'\n      sql += ' INNER JOIN GENERATE_SERIES(1.0::numeric, 10.0::numeric, 0.5::numeric)'\n      sql += ' AS series ON \"series\" = \"videos\".\"id\"'\n      expect(source.join_series(1.0..10.0, with: :id, step: 0.5).to_sql).to eq(sql)\n    end\n\n    it 'works with time values' do\n      sql = 'SELECT \"videos\".* FROM \"videos\"'\n      sql += ' INNER JOIN GENERATE_SERIES('\n      sql += \"'2025-01-01 00:00:00'::timestamp, '2025-01-01 01:00:00'::timestamp\"\n      sql += \", 'PT1M'::interval\"\n      sql += ') AS series ON \"series\" = \"videos\".\"created_at\"'\n      range = (Time.utc(2025, 1, 1, 0)..Time.utc(2025, 1, 1, 1))\n      expect(source.join_series(range, with: :created_at, step: 1.minute).to_sql).to eq(sql)\n    end\n\n    it 'works with date values' do\n      sql = 'SELECT \"videos\".* FROM \"videos\"'\n      sql += ' INNER JOIN GENERATE_SERIES('\n      sql += \"'2025-01-01 00:00:00'::timestamp, '2025-01-02 
00:00:00'::timestamp\"\n      sql += \", 'P1D'::interval\"\n      sql += ') AS series ON \"series\" = \"videos\".\"created_at\"'\n      range = (Date.new(2025, 1, 1)..Date.new(2025, 1, 2))\n      expect(source.join_series(range, with: :created_at, step: 1.day).to_sql).to eq(sql)\n    end\n\n    it 'works with time with zones values' do\n      sql = 'SELECT \"videos\".* FROM \"videos\"'\n      sql += ' INNER JOIN GENERATE_SERIES('\n      sql += \"'2025-01-01 00:00:00'::timestamptz, '2025-01-01 01:00:00'::timestamptz\"\n      sql += \", 'PT1M'::interval\"\n      sql += ') AS series ON \"series\" = \"videos\".\"id\"'\n      left = ActiveSupport::TimeZone['UTC'].local(2025, 1, 1, 0)\n      right = ActiveSupport::TimeZone['UTC'].local(2025, 1, 1, 1)\n      expect(source.join_series(left..right, with: :id, step: 1.minute).to_sql).to eq(sql)\n    end\n\n    it 'can provide the additional time zone value' do\n      sql = 'SELECT \"videos\".* FROM \"videos\"'\n      sql += ' INNER JOIN GENERATE_SERIES('\n      sql += \"'2025-01-01 00:00:00'::timestamptz, '2025-01-01 01:00:00'::timestamptz\"\n      sql += \", 'PT1M'::interval, 'UTC'::text\"\n      sql += ') AS series ON \"series\" = \"videos\".\"id\"'\n      left = ActiveSupport::TimeZone['UTC'].local(2025, 1, 1, 0)\n      right = ActiveSupport::TimeZone['UTC'].local(2025, 1, 1, 1)\n\n      query = source.join_series(left..right, with: :id, step: 1.minute, time_zone: 'UTC')\n      expect(query.to_sql).to eq(sql)\n    end\n\n    it 'can use other types of joins' do\n      sql = ' LEFT OUTER JOIN GENERATE_SERIES(1::integer, 10::integer)'\n      expect(source.join_series(1..10, with: :id, mode: :left).to_sql).to include(sql)\n\n      sql = ' RIGHT OUTER JOIN GENERATE_SERIES(1::integer, 10::integer)'\n      expect(source.join_series(1..10, with: :id, mode: :right).to_sql).to include(sql)\n\n      sql = ' FULL OUTER JOIN GENERATE_SERIES(1::integer, 10::integer)'\n      expect(source.join_series(1..10, with: :id, mode: 
:full).to_sql).to include(sql)\n    end\n\n    it 'supports a complex way of joining' do\n      query = source.join_series(1..10) do |series, table|\n        table['id'].lteq(series)\n      end\n\n      sql = 'SELECT \"videos\".* FROM \"videos\"'\n      sql += ' INNER JOIN GENERATE_SERIES(1::integer, 10::integer)'\n      sql += ' AS series ON \"videos\".\"id\" <= \"series\"'\n      expect(query.to_sql).to eq(sql)\n    end\n\n    it 'properly binds all provided values' do\n      query = source.join_series(1..10, with: :id, step: 2)\n      sql, binds = get_query_with_binds { query.load }\n\n      expect(sql).to include('GENERATE_SERIES($1::integer, $2::integer, $3::integer)')\n      expect(binds.map(&:value)).to eq([1, 10, 2])\n    end\n\n    context 'on errors' do\n      it 'does not support non-range values' do\n        expect do\n          source.join_series(1, with: :id)\n        end.to raise_error(ArgumentError, /Range/)\n      end\n\n      it 'does not support beginless ranges' do\n        expect do\n          source.join_series(..10, with: :id)\n        end.to raise_error(ArgumentError, /Beginless/)\n      end\n\n      it 'does not support endless ranges' do\n        expect do\n          source.join_series(1.., with: :id)\n        end.to raise_error(ArgumentError, /Endless/)\n      end\n\n      it 'requires a step when using non-numeric ranges' do\n        range = Date.new(2025, 1, 1)..Date.new(2025, 1, 10)\n        expect do\n          source.join_series(range, with: :id)\n        end.to raise_error(ArgumentError, /:step/)\n      end\n\n      it 'has strict type of join support' do\n        expect do\n          source.join_series(1..10, with: :id, mode: :cross)\n        end.to raise_error(ArgumentError, /join type/)\n      end\n\n      it 'requires a :with keyword' do\n        expect do\n          source.join_series(1..10)\n        end.to raise_error(ArgumentError, /:with/)\n      end\n\n      it 'does not support unexpected values' do\n        expect do\n    
      source.join_series(1..10, step: :other)\n        end.to raise_error(ArgumentError, /value type/)\n      end\n    end\n  end\n\n  context 'on buckets' do\n    let(:source) { User.all }\n\n    it 'produces the right query' do\n      query = source.buckets(:age, 0..50, count: 5)\n      sql, binds = get_query_with_binds { query.load }\n\n      expect(sql).to include(<<~SQL.squish)\n        WIDTH_BUCKET(\"users\".\"age\", $1::numeric, $2::numeric, $3::integer) AS bucket\n      SQL\n      expect(binds.map(&:value)).to eq([0, 50, 5])\n    end\n\n    it 'can query records by buckets' do\n      list = [create(:user, age: 5), create(:user, age: 5), create(:user, age: 15)]\n      query = source.buckets(:age, 0..50, count: 5).records\n\n      expect(query).to be_a(Hash)\n      expect(query.keys).to match_array([0...10, 10...20])\n      expect(query[0...10]).to match_array([list[0], list[1]])\n      expect(query[10...20]).to match_array([list[2]])\n    end\n\n    it 'can query buckets of roles' do\n      list = [create(:user, role: :visitor)]\n      list << create(:user, role: :assistant)\n      list << create(:user, role: :manager)\n      query = source.buckets(:role, %w[assistant manager], cast: :roles).records\n\n      expect(query).to be_a(Hash)\n      expect(query.keys).to match_array([nil, 'assistant', 'manager'])\n      expect(query[nil]).to eq([list[0]])\n      expect(query['assistant']).to eq([list[1]])\n      expect(query['manager']).to eq([list[2]])\n    end\n\n    it 'works with calculations' do\n      list = [create(:user, age: 5), create(:user, age: 5), create(:user, age: 15)]\n      query = source.buckets(:age, 0..50, count: 5).count\n\n      expect(query).to be_a(Hash)\n      expect(query.keys).to match_array([0...10, 10...20])\n      expect(query[0...10]).to eq(2)\n      expect(query[10...20]).to eq(1)\n    end\n\n    it 'works with other types of calculations' do\n      list = [create(:user, age: 5), create(:user, age: 5), create(:user, age: 15)]\n      
query = source.buckets(:age, 0..50, count: 5).sum(:age)\n\n      expect(query).to be_a(Hash)\n      expect(query.keys).to match_array([0...10, 10...20])\n      expect(query[0...10]).to eq(10)\n      expect(query[10...20]).to eq(15)\n    end\n\n    it 'works with joins and merge' do\n      list = [create(:user, age: 5), create(:user, age: 5), create(:user, age: 15)]\n      records = [create(:comment, user: list[0], content: 'Hello')]\n      records << create(:comment, user: list[1], content: 'World')\n      records << create(:comment, user: list[2], content: 'Test')\n\n      query = Comment.joins(:user).merge(source.buckets(:age, 0..50, count: 5)).records\n\n      expect(query).to be_a(Hash)\n      expect(query.keys).to match_array([0...10, 10...20])\n      expect(query[0...10]).to match_array([records[0], records[1]])\n      expect(query[10...20]).to match_array([records[2]])\n    end\n  end\n\nend\n"
  },
  {
    "path": "spec/tests/schema_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'Schema' do\n  let(:connection) { ActiveRecord::Base.connection }\n  let(:source) { ActiveRecord::Base.connection_pool }\n\n  before do\n    connection.instance_variable_set(:@schemas_blacklist, nil)\n    connection.instance_variable_set(:@schemas_whitelist, nil)\n  end\n\n  context 'on migration' do\n    it 'can check for existence' do\n      expect(connection.schema_exists?(:information_schema)).to be_falsey\n      expect(connection.schema_exists?(:information_schema, filtered: false)).to be_truthy\n    end\n\n    it 'can be created' do\n      expect(connection.schema_exists?(:legacy, filtered: false)).to be_falsey\n      connection.create_schema(:legacy)\n      expect(connection.schema_exists?(:legacy, filtered: false)).to be_truthy\n    end\n\n    it 'can be deleted' do\n      expect(connection.schema_exists?(:legacy, filtered: false)).to be_falsey\n\n      connection.create_schema(:legacy)\n      expect(connection.schema_exists?(:legacy, filtered: false)).to be_truthy\n\n      connection.drop_schema(:legacy)\n      expect(connection.schema_exists?(:legacy, filtered: false)).to be_falsey\n    end\n\n    it 'works with whitelist' do\n      expect(connection.schema_exists?(:legacy)).to be_falsey\n      connection.create_schema(:legacy)\n\n      expect(connection.schema_exists?(:legacy)).to be_falsey\n      expect(connection.schema_exists?(:legacy, filtered: false)).to be_truthy\n\n      connection.schemas_whitelist.push('legacy')\n      expect(connection.schema_exists?(:legacy)).to be_truthy\n    end\n\n    context 'reverting' do\n      let(:migration) { ActiveRecord::Migration::Current.new('Testing') }\n\n      before do\n        allow_any_instance_of(ActiveRecord::Migration).to receive(:puts) # Disable messages\n        connection.create_schema(:legacy)\n      end\n\n      it 'reverts the creation of a schema' do\n        expect(connection.schema_exists?(:legacy, filtered: false)).to be_truthy\n        
migration.revert { migration.connection.create_schema(:legacy) }\n        expect(connection.schema_exists?(:legacy, filtered: false)).to be_falsey\n      end\n\n      it 'reverts the creation of a table' do\n        connection.create_table(:users, schema: :legacy) { |t| t.string(:name) }\n\n        expect(connection.table_exists?('legacy.users')).to be_truthy\n        migration.revert { migration.connection.create_table(:users, schema: :legacy) }\n        expect(connection.table_exists?('legacy.users')).to be_falsey\n      end\n    end\n  end\n\n  context 'on schema' do\n    let(:dump_result) do\n      ActiveRecord::SchemaDumper.dump(source, (dump_result = StringIO.new))\n      dump_result.string\n    end\n\n    it 'does not add when there is no extra schemas' do\n      connection.drop_schema(:internal, force: :cascade)\n      expect(dump_result).not_to match /Custom schemas defined in this database/\n    end\n\n    it 'does not include tables from blacklisted schemas' do\n      connection.schemas_blacklist.push('internal')\n      expect(dump_result).not_to match /create_table \\\"users\\\",.*schema: +\"internal\"/\n    end\n\n    context 'with internal schema whitelisted' do\n      before { connection.schemas_whitelist.push('internal') }\n\n      it 'dumps the schemas' do\n        expect(dump_result).to match /create_schema \\\"internal\\\"/\n      end\n\n      it 'shows the internal users table in the connection tables list' do\n        expect(connection.tables).to include('internal.users')\n      end\n\n      it 'dumps tables on whitelisted schemas' do\n        expect(dump_result).to match /create_table \\\"users\\\",.*schema: +\"internal\"/\n      end\n    end\n\n    it 'does not affect serial ids' do\n      connection.create_table(:primary_keys, id: :serial) do |t|\n        t.string :title\n      end\n\n      parts = '\"primary_keys\", id: :serial, force: :cascade'\n      expect(dump_result).to match(/create_table #{parts} do /)\n    end\n  end\n\n  context 
'on relation' do\n    let(:model) { Internal::User }\n    let(:table_name) { Torque::PostgreSQL::TableName.new(model, 'users') }\n\n    it 'adds the schema to the query' do\n      model.reset_table_name\n      expect(table_name.to_s).to eq('internal.users')\n      expect(model.all.to_sql).to match(/FROM \"internal\".\"users\"/)\n    end\n\n    it 'can load the schema from the module' do\n      allow(Internal).to receive(:schema).and_return('internal')\n      allow(model).to receive(:schema).and_return(nil)\n\n      model.reset_table_name\n      expect(table_name.to_s).to eq('internal.users')\n      expect(model.all.to_sql).to match(/FROM \"internal\".\"users\"/)\n    end\n\n    it 'does not change anything if the model has not configured a schema' do\n      allow(model).to receive(:schema).and_return(nil)\n\n      model.reset_table_name\n      expect(table_name.to_s).to eq('users')\n      expect(model.all.to_sql).to match(/FROM \"users\"/)\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/table_inheritance_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'TableInheritance' do\n  let(:connection) { ActiveRecord::Base.connection }\n\n  context 'on migration' do\n    mock_create_table\n\n    it 'does not affect some basic forms of table creation' do\n      sql = connection.create_table('schema_migrations', id: false) do |t|\n        t.string :version, **connection.internal_string_options_for_primary_key\n      end\n\n      result = 'CREATE TABLE \"schema_migrations\"'\n      result << ' \\(\"version\" character varying( NOT NULL)? PRIMARY KEY\\)'\n      expect(sql).to match(/#{result}/)\n    end\n\n    it 'does not affect simple table creation' do\n      sql = connection.create_table(:activities) do |t|\n        t.string :title\n        t.boolean :active\n        t.timestamps\n      end\n\n      result = 'CREATE TABLE \"activities\" ('\n      result << '\"id\" bigserial primary key'\n      result << ', \"title\" character varying'\n      result << ', \"active\" boolean'\n      result << ', \"created_at\" timestamp(6) NOT NULL'\n      result << ', \"updated_at\" timestamp(6) NOT NULL'\n      result << ')'\n      expect(sql).to eql(result)\n    end\n\n    it 'does not affect temporary table creation based on a query' do\n      query = 'SELECT * FROM \"authors\"'\n      sql = connection.create_table(:test, temporary: true, as: query)\n\n      result = 'CREATE TEMPORARY TABLE \"test\"'\n      result << \"  AS #{query}\"\n      expect(sql).to eql(result)\n    end\n\n    it 'adds the inherits statement for a single inheritance' do\n      sql = connection.create_table(:activity_videos, inherits: :activities) do |t|\n        t.string :url\n      end\n\n      result = 'CREATE TABLE \"activity_videos\" ('\n      result << '\"url\" character varying'\n      result << ') INHERITS ( \"activities\" )'\n      expect(sql).to eql(result)\n    end\n\n    it 'adds the inherits statement for a multiple inheritance' do\n      sql = connection.create_table(:activity_tests, inherits: 
[:activities, :tests]) do |t|\n        t.string :grade\n      end\n\n      result = 'CREATE TABLE \"activity_tests\" ('\n      result << '\"grade\" character varying'\n      result << ') INHERITS ( \"activities\" , \"tests\" )'\n      expect(sql).to eql(result)\n    end\n\n    it 'allows empty-body create table operation' do\n      sql = connection.create_table(:activity_posts, inherits: :activities)\n      result = \"CREATE TABLE \\\"activity_posts\\\" ()\"\n      result << ' INHERITS ( \"activities\" )'\n      expect(sql).to eql(result)\n    end\n  end\n\n  context 'on schema' do\n    let(:source) { ActiveRecord::Base.connection_pool }\n    let(:dump_result) do\n      ActiveRecord::SchemaDumper.dump(source, (dump_result = StringIO.new))\n      dump_result.string\n    end\n\n    it 'dumps single inheritance with body' do\n      parts = '\"activity_books\"'\n      parts << ', id: false'\n      parts << ', inherits: \"activities\"'\n      parts << ', force: :cascade'\n      expect(dump_result).to match(/create_table #{parts} do /)\n    end\n\n    it 'dumps single inheritance without body' do\n      parts = '\"activity_post_samples\"'\n      parts << ', id: false'\n      parts << ', inherits: \"activity_posts\"'\n      parts << ', force: :cascade'\n      expect(dump_result).to match(/create_table #{parts}(?! 
do \\|t\\|)/)\n    end\n\n    it 'dumps multiple inheritance' do\n      parts = '\"activity_posts\"'\n      parts << ', id: false'\n      parts << ', inherits: (\\[\"images\", \"activities\"\\]|\\[\"activities\", \"images\"\\])'\n      parts << ', force: :cascade'\n      expect(dump_result).to match(/create_table #{parts}/)\n    end\n  end\n\n  context 'on schema cache' do\n    let(:schema_cache) { ActiveRecord::Base.connection.schema_cache }\n    let(:schema_cache_reflection) { schema_cache.instance_variable_get(:@schema_reflection) }\n    let(:new_schema_cache) { schema_cache_reflection.send(:cache, schema_cache_source) }\n    let(:schema_cache_source) { schema_cache.instance_variable_get(:@pool) }\n\n    subject { new_schema_cache }\n\n    it 'correctly defines the associations' do\n      scenario = {\n        'M' => %w(N),\n        'N' => %w(C),\n        'C' => %w(B),\n        'B' => %w(A),\n        'D' => %w(A),\n        'F' => %w(E),\n        'G' => %w(E H),\n      }\n\n      subject.instance_variable_set(:@inheritance_loaded, true)\n      subject.instance_variable_set(:@inheritance_dependencies, scenario)\n      subject.instance_variable_set(:@inheritance_associations, subject.send(:generate_associations))\n      subject.instance_variable_set(:@data_sources_model_names, {})\n      expect(subject.instance_variable_get(:@inheritance_associations)).to eql({\n        'A' => %w(B D C N M),\n        'B' => %w(C N M),\n        'C' => %w(N M),\n        'N' => %w(M),\n        'E' => %w(F G),\n        'H' => %w(G),\n      })\n    end\n\n    context 'on looking up models' do\n      let(:prepare_arguments) { [schema_cache_source] }\n      let(:prepare_method) { :add_all }\n\n      after(:all) do\n        schema_cache = ActiveRecord::Base.connection.schema_cache\n        schema_cache.instance_variable_set(:@data_sources, {})\n        schema_cache.instance_variable_set(:@data_sources_model_names, {})\n      end\n\n      it 'respect irregular names' do\n        
allow(Torque::PostgreSQL.config).to receive(:irregular_models).and_return({\n          'public.posts' => 'ActivityPost',\n        })\n\n        subject.send(prepare_method, *prepare_arguments)\n        list = subject.instance_variable_get(:@data_sources_model_names)\n        expect(list).to have_key('public.posts')\n        expect(list['public.posts']).to eql(ActivityPost)\n      end\n\n      it 'does not load irregular where the data source is not defined' do\n        allow(Torque::PostgreSQL.config).to receive(:irregular_models).and_return({\n          'products' => 'Product',\n        })\n\n        subject.send(prepare_method, *prepare_arguments)\n        list = subject.instance_variable_get(:@data_sources_model_names)\n        expect(list).to_not have_key('products')\n      end\n\n      it 'works with eager loading' do\n        allow(Torque::PostgreSQL.config).to receive(:eager_load).and_return(true)\n        ActivityPost.reset_table_name\n\n        list = subject.instance_variable_get(:@data_sources_model_names)\n        expect(list).to have_key('activity_posts')\n        expect(list['activity_posts']).to eql(ActivityPost)\n      end\n\n      {\n        'activities' => 'Activity',\n        'activity_posts' => 'ActivityPost',\n        'activity_post_samples' => 'ActivityPost::Sample',\n      }.each do |table_name, expected_model|\n        it \"translate the table name #{table_name} to #{expected_model} model\" do\n          expect(subject.lookup_model(table_name)).to eql(expected_model.constantize)\n        end\n      end\n    end\n  end\n\n  context 'on inheritance' do\n    let(:base) { Activity }\n    let(:child) { ActivityPost }\n    let(:child2) { ActivityBook }\n    let(:other) { AuthorJournalist }\n\n    before { ActiveRecord::Base.connection.schema_cache.clear! 
}\n\n    it 'identifies mergeable attributes' do\n      result_base = %w(id author_id title active kind created_at updated_at description url file post_id)\n      expect(base.inheritance_mergeable_attributes.sort).to eql(result_base.sort)\n    end\n\n    it 'has a merged version of attributes' do\n      result_base = %w(id author_id title active kind created_at updated_at description url activated file post_id)\n      result_child = %w(id author_id title active kind created_at updated_at file post_id url activated)\n      result_child2 = %w(id author_id title active kind created_at updated_at description url activated)\n      result_other = %w(id name type specialty)\n\n      expect(base.inheritance_merged_attributes).to eql(result_base)\n      expect(child.inheritance_merged_attributes).to eql(result_child)\n      expect(child2.inheritance_merged_attributes).to eql(result_child2)\n      expect(other.inheritance_merged_attributes).to eql(result_other)\n    end\n\n    it 'identifies physical inheritance' do\n      expect(base.physically_inherited?).to be_falsey\n      expect(child.physically_inherited?).to be_truthy\n      expect(child2.physically_inherited?).to be_truthy\n      expect(other.physically_inherited?).to be_falsey\n    end\n\n    it 'returns a list of dependent tables' do\n      expect(base.inheritance_dependents).to eql(%w(activity_books activity_posts activity_post_samples))\n      expect(child.inheritance_dependents).to eql(%w(activity_post_samples))\n      expect(child2.inheritance_dependents).to eql(%w())\n      expect(other.inheritance_dependents).to eql(%w())\n    end\n\n    it 'can check dependency' do\n      expect(base.physically_inheritances?).to be_truthy\n      expect(child.physically_inheritances?).to be_truthy\n      expect(child2.physically_inheritances?).to be_falsey\n      expect(other.physically_inheritances?).to be_falsey\n    end\n\n    it 'returns the list of models that the records can be casted to' do\n      
expect(base.casted_dependents.values.map(&:name)).to eql(%w(ActivityBook ActivityPost ActivityPost::Sample))\n      expect(child.casted_dependents.values.map(&:name)).to eql(%w(ActivityPost::Sample))\n      expect(child2.casted_dependents.values.map(&:name)).to eql(%w())\n      expect(other.casted_dependents.values.map(&:name)).to eql(%w())\n    end\n\n    it 'correctly generates the tables name' do\n      expect(base.table_name).to eql('activities')\n      expect(child.table_name).to eql('activity_posts')\n      expect(child2.table_name).to eql('activity_books')\n      expect(other.table_name).to eql('authors')\n    end\n\n    it 'respects the table name prefix and suffix defined on parent module' do\n      mod = Object.const_set('Private', Module.new)\n      mod.define_singleton_method(:table_name_prefix) { 'private.' }\n      mod.define_singleton_method(:table_name_suffix) { '_bundle' }\n      result = 'private.activity_post_others_bundle'\n\n      klass = mod.const_set('Other', Class.new(ActivityPost))\n      allow(klass).to receive(:module_parent).and_return(child)\n      allow(klass).to receive(:module_parents).and_return([mod])\n      allow(klass).to receive(:physically_inherited?).and_return(true)\n      expect(klass.send(:compute_table_name)).to be_eql(result)\n    end\n  end\n\n  context 'on relation' do\n    let(:base) { Activity }\n    let(:child) { ActivityBook }\n    let(:other) { AuthorJournalist }\n\n    it 'has operation methods' do\n      expect(base).to respond_to(:itself_only)\n      expect(base).to respond_to(:cast_records)\n      expect(base.new).to respond_to(:cast_record)\n    end\n\n    context 'itself only' do\n      it 'does not mess with original queries' do\n        expect(base.all.to_sql).to \\\n          eql('SELECT \"activities\".* FROM \"activities\"')\n      end\n\n      it 'adds the only condition to the query' do\n        expect(base.itself_only.to_sql).to \\\n          eql('SELECT \"activities\".* FROM ONLY \"activities\"')\n    
  end\n\n      it 'returns the right ammount of entries' do\n        base.create!(title: 'Activity only')\n        child.create!(title: 'Activity book')\n\n        expect(base.count).to eql(2)\n        expect(base.itself_only.count).to eql(1)\n        expect(child.count).to eql(1)\n      end\n    end\n\n    context 'cast records' do\n      before :each do\n        base.create(title: 'Activity test')\n        child.create(title: 'Activity book', url: 'bookurl1')\n        other.create(name: 'An author name')\n      end\n\n      it 'does not mess with single table inheritance' do\n        result = 'SELECT \"authors\".* FROM \"authors\"'\n        result << \" WHERE \\\"authors\\\".\\\"type\\\" = 'AuthorJournalist'\"\n        expect(other.all.to_sql).to eql(result)\n      end\n\n      it 'adds all statements to load all the necessary records' do\n        result = 'SELECT \"activities\".*, \"activities\".\"tableoid\"::regclass AS _record_class, \"i_0\".\"description\"'\n        result << ', COALESCE(\"i_0\".\"url\", \"i_1\".\"url\", \"i_2\".\"url\") AS url, \"i_0\".\"activated\" AS activity_books__activated'\n        result << ', \"i_1\".\"activated\" AS activity_posts__activated, \"i_2\".\"activated\" AS activity_post_samples__activated'\n        result << ', COALESCE(\"i_1\".\"file\", \"i_2\".\"file\") AS file, COALESCE(\"i_1\".\"post_id\", \"i_2\".\"post_id\") AS post_id'\n        result << \", \\\"activities\\\".\\\"tableoid\\\"::regclass::varchar IN ('activity_books', 'activity_posts', 'activity_post_samples') AS _auto_cast\"\n        result << ' FROM \"activities\"'\n        result << ' LEFT OUTER JOIN \"activity_books\" \"i_0\" ON \"activities\".\"id\" = \"i_0\".\"id\"'\n        result << ' LEFT OUTER JOIN \"activity_posts\" \"i_1\" ON \"activities\".\"id\" = \"i_1\".\"id\"'\n        result << ' LEFT OUTER JOIN \"activity_post_samples\" \"i_2\" ON \"activities\".\"id\" = \"i_2\".\"id\"'\n        expect(base.cast_records.all.to_sql).to eql(result)\n      end\n\n    
  it 'can be have simplefied joins' do\n        result = 'SELECT \"activities\".*, \"activities\".\"tableoid\"::regclass AS _record_class'\n        result << ', \"i_0\".\"description\", \"i_0\".\"url\", \"i_0\".\"activated\"'\n        result << \", \\\"activities\\\".\\\"tableoid\\\"::regclass::varchar IN ('activity_books') AS _auto_cast\"\n        result << ' FROM \"activities\"'\n        result << ' LEFT OUTER JOIN \"activity_books\" \"i_0\" ON \"activities\".\"id\" = \"i_0\".\"id\"'\n        expect(base.cast_records(child).all.to_sql).to eql(result)\n      end\n\n      it 'can be filtered by record type' do\n        result = 'SELECT \"activities\".*, \"activities\".\"tableoid\"::regclass AS _record_class'\n        result << ', \"i_0\".\"description\", \"i_0\".\"url\", \"i_0\".\"activated\"'\n        result << \", \\\"activities\\\".\\\"tableoid\\\"::regclass::varchar IN ('activity_books') AS _auto_cast\"\n        result << ' FROM \"activities\"'\n        result << ' LEFT OUTER JOIN \"activity_books\" \"i_0\" ON \"activities\".\"id\" = \"i_0\".\"id\"'\n        result << \" WHERE \\\"activities\\\".\\\"tableoid\\\"::regclass::varchar IN ('activity_books')\"\n        expect(base.cast_records(child, filter: true).all.to_sql).to eql(result)\n      end\n\n      it 'works with count and does not add extra columns' do\n        result = 'SELECT COUNT(*)'\n        result << ' FROM \"activities\"'\n        result << ' LEFT OUTER JOIN \"activity_books\" \"i_0\" ON \"activities\".\"id\" = \"i_0\".\"id\"'\n        result << ' LEFT OUTER JOIN \"activity_posts\" \"i_1\" ON \"activities\".\"id\" = \"i_1\".\"id\"'\n        result << ' LEFT OUTER JOIN \"activity_post_samples\" \"i_2\" ON \"activities\".\"id\" = \"i_2\".\"id\"'\n        query = get_last_executed_query{ base.cast_records.all.count }\n        expect(query).to eql(result)\n      end\n\n      it 'works with sum and does not add extra columns' do\n        result = 'SELECT SUM(\"activities\".\"id\")'\n        result << ' 
FROM \"activities\"'\n        result << ' LEFT OUTER JOIN \"activity_books\" \"i_0\" ON \"activities\".\"id\" = \"i_0\".\"id\"'\n        result << ' LEFT OUTER JOIN \"activity_posts\" \"i_1\" ON \"activities\".\"id\" = \"i_1\".\"id\"'\n        result << ' LEFT OUTER JOIN \"activity_post_samples\" \"i_2\" ON \"activities\".\"id\" = \"i_2\".\"id\"'\n        query = get_last_executed_query{ base.cast_records.all.sum(:id) }\n        expect(query).to eql(result)\n      end\n\n      it 'returns the correct model object' do\n        ActivityPost.create(title: 'Activity post')\n        ActivityPost::Sample.create(title: 'Activity post')\n        records = base.cast_records.order(:id).load.to_a\n\n        expect(records[0]).to be_instance_of(Activity)\n        expect(records[1]).to be_instance_of(ActivityBook)\n        expect(records[2]).to be_instance_of(ActivityPost)\n        expect(records[3]).to be_instance_of(ActivityPost::Sample)\n      end\n\n      it 'does not cast unnecessary records' do\n        ActivityPost.create(title: 'Activity post')\n        records = base.cast_records(ActivityBook).order(:id).load.to_a\n\n        expect(records[0]).to be_instance_of(Activity)\n        expect(records[1]).to be_instance_of(ActivityBook)\n        expect(records[2]).to be_instance_of(Activity)\n      end\n\n      it 'correctly identifies same name attributes' do\n        ActivityPost.create(title: 'Activity post', url: 'posturl1')\n        records = base.cast_records.order(:id).load.to_a\n\n        expect(records[1].url).to eql('bookurl1')\n        expect(records[2].url).to eql('posturl1')\n      end\n\n      # TODO: Maybe in the future\n      xit 'does not make internal inheritance attributes accessible' do\n        record = base.cast_records.order(:id).load.last\n\n        expect(record).to be_instance_of(ActivityBook)\n        expect(record).not_to respond_to(:_record_class)\n        expect(record).not_to respond_to(:_auto_cast)\n      end\n    end\n\n    context 'cast 
record' do\n      before :each do\n        base.create(title: 'Activity test')\n        child.create(title: 'Activity book')\n        other.create(name: 'An author name')\n        base.instance_variable_set(:@casted_dependents, nil)\n      end\n\n      it 'does not affect normal records' do\n        expect(base.first.cast_record).to be_a(base)\n        expect(child.first.cast_record).to be_a(child)\n        expect(other.first.cast_record).to be_a(other)\n      end\n\n      it 'rises an error when the casted model cannot be defined' do\n        base.instance_variable_set(:@casted_dependents, {})\n        expect{ base.second.cast_record }.to raise_error(ArgumentError, /to type 'activity_books'/)\n      end\n\n      it 'can return the record class even when the auxiliary statement is not mentioned' do\n        expect(base.first._record_class).to eql('activities')\n        expect(base.second._record_class).to eql('activity_books')\n        expect(other.first._record_class).to eql('authors')\n      end\n\n      it 'does trigger record casting when accessed through inheritance' do\n        expect(base.second.cast_record).to eql(child.first)\n      end\n\n      context 'using uuid' do\n        let(:base) { Question }\n        let(:child) { QuestionSelect }\n\n        before :each do\n          base.create(title: 'Simple question')\n          child.create(title: 'Select question')\n          base.instance_variable_set(:@casted_dependents, nil)\n        end\n\n        it 'does not affect normal records' do\n          expect(base.first.cast_record).to be_a(base)\n          expect(child.first.cast_record).to be_a(child)\n        end\n\n        it 'does trigger record casting when accessed through inheritance' do\n          expect(base.second.cast_record).to eql(child.first)\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/tests/versioned_commands_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe 'VersionedCommands' do\n  let(:connection) { ActiveRecord::Base.connection }\n\n  context 'on migration' do\n    it 'does not have any of the schema methods' do\n      expect(connection).not_to respond_to(:create_function)\n      expect(connection).not_to respond_to(:create_type)\n      expect(connection).not_to respond_to(:create_view)\n    end\n\n    it 'does not have the methods available in a migration' do\n      instance = Class.new(ActiveRecord::Migration::Current).allocate\n      expect(instance).not_to respond_to(:create_function)\n      expect(instance).not_to respond_to(:create_type)\n      expect(instance).not_to respond_to(:create_view)\n    end\n\n    it 'does have the methods in schema definition' do\n      instance = ActiveRecord::Schema[ActiveRecord::Migration.current_version].allocate\n      expect(instance).to respond_to(:create_function)\n      expect(instance).to respond_to(:create_type)\n      expect(instance).to respond_to(:create_view)\n    end\n\n    context 'on context' do\n      let(:context) { connection.pool.migration_context }\n      let(:path) { Pathname.new(__FILE__).join('../../fixtures/migrations').expand_path.to_s }\n\n      before { context.instance_variable_set(:@migrations_paths, [path]) }\n\n      it 'list all migrations accordingly' do\n        result = context.migrations.map { |m| File.basename(m.filename) }\n        expect(result[0]).to eq('20250101000001_create_users.rb')\n        expect(result[1]).to eq('20250101000002_create_function_count_users_v1.sql')\n        expect(result[2]).to eq('20250101000003_create_internal_users.rb')\n        expect(result[3]).to eq('20250101000004_update_function_count_users_v2.sql')\n        expect(result[4]).to eq('20250101000005_create_view_all_users_v1.sql')\n        expect(result[5]).to eq('20250101000006_create_type_user_id_v1.sql')\n        expect(result[6]).to eq('20250101000007_remove_function_count_users_v2.sql')\n      end\n\n 
     it 'correctly report the status of all migrations' do\n        result = context.migrations_status.reject { |s| s[1].start_with?('0') }\n        expect(result[0]).to eq(['down', '20250101000001', 'Create users'])\n        expect(result[1]).to eq(['down', '20250101000002', 'Create Function count_users (v1)'])\n        expect(result[2]).to eq(['down', '20250101000003', 'Create internal users'])\n        expect(result[3]).to eq(['down', '20250101000004', 'Update Function count_users (v2)'])\n        expect(result[4]).to eq(['down', '20250101000005', 'Create View all_users (v1)'])\n        expect(result[5]).to eq(['down', '20250101000006', 'Create Type user_id (v1)'])\n        expect(result[6]).to eq(['down', '20250101000007', 'Remove Function count_users (v2)'])\n      end\n\n      it 'reports for invalid names' do\n        allow(context).to receive(:command_files).and_return(['something.sql'])\n        error = ::Torque::PostgreSQL::IllegalCommandTypeError\n        expect { context.migrations }.to raise_error(error)\n      end\n    end\n\n    context 'on validation' do\n      let(:base) { Torque::PostgreSQL::VersionedCommands }\n\n      context 'on function' do\n        it 'prevents multiple functions definition' do\n          content = <<~SQL\n            CREATE FUNCTION test(a integer);\n            CREATE FUNCTION other_test(a varchar);\n          SQL\n\n          expect do\n            base.validate!(:function, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'prevents same name but different schema' do\n          content = <<~SQL\n            CREATE FUNCTION internal.test(a integer);\n            CREATE FUNCTION external.test(a varchar);\n          SQL\n\n          expect do\n            base.validate!(:function, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'requires OR REPLACE clause' do\n          content = <<~SQL\n            CREATE OR REPLACE FUNCTION test(a integer);\n    
        CREATE FUNCTION test(a varchar);\n          SQL\n\n          expect do\n            base.validate!(:function, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'requires matching name' do\n          content = <<~SQL\n            CREATE OR REPLACE FUNCTION other_test(a integer);\n            CREATE OR REPLACE FUNCTION other_test(a varchar);\n          SQL\n\n          expect do\n            base.validate!(:function, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'works when setup correctly' do\n          content = <<~SQL\n            CREATE OR REPLACE FUNCTION test(a integer);\n            CREATE OR REPLACE FUNCTION test(a varchar);\n            CREATE OR REPLACE FUNCTION TEST(a date);\n          SQL\n\n          expect { base.validate!(:function, content, 'test') }.not_to raise_error\n        end\n\n        it 'supports name with schema' do\n          content = <<~SQL\n            CREATE OR REPLACE FUNCTION internal.test(a integer);\n            CREATE OR REPLACE FUNCTION internal.test(a varchar);\n            CREATE OR REPLACE FUNCTION internal.TEST(a date);\n          SQL\n\n          expect { base.validate!(:function, content, 'internal_test') }.not_to raise_error\n        end\n      end\n\n      context 'on type' do\n        it 'prevents multiple type definitions' do\n          content = <<~SQL\n            CREATE TYPE test AS;\n            CREATE TYPE other_test AS;\n          SQL\n\n          expect do\n            base.validate!(:type, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'prevents same name but different schema' do\n          content = <<~SQL\n            DROP TYPE IF EXISTS internal.test;\n            CREATE TYPE external.test AS;\n          SQL\n\n          expect do\n            base.validate!(:type, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'prevents multiple type 
drops' do\n          content = <<~SQL\n            DROP TYPE IF EXISTS test;\n            DROP TYPE IF EXISTS other_test;\n            CREATE TYPE test AS;\n          SQL\n\n          expect do\n            base.validate!(:type, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'requires DROP TYPE clause' do\n          content = <<~SQL\n            CREATE TYPE test AS;\n          SQL\n\n          expect do\n            base.validate!(:type, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'prevents dropping other types' do\n          content = <<~SQL\n            DROP TYPE IF EXISTS other_test;\n            CREATE TYPE test AS;\n          SQL\n\n          expect do\n            base.validate!(:type, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'requires matching name' do\n          content = <<~SQL\n            DROP TYPE IF EXISTS other_test;\n            CREATE TYPE other_test AS;\n          SQL\n\n          expect do\n            base.validate!(:type, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'works when setup correctly' do\n          content = <<~SQL\n            DROP TYPE IF EXISTS test;\n            CREATE TYPE TEST AS;\n          SQL\n\n          expect { base.validate!(:type, content, 'test') }.not_to raise_error\n        end\n\n        it 'supports name with schema' do\n          content = <<~SQL\n            DROP TYPE IF EXISTS internal.test;\n            CREATE TYPE INTERNAL.TEST AS;\n          SQL\n\n          expect { base.validate!(:type, content, 'internal_test') }.not_to raise_error\n        end\n      end\n\n      context 'on view' do\n        it 'requires a proper definition' do\n          content = <<~SQL\n            CREATE TEMP MATERIALIZED VIEW test AS;\n          SQL\n\n          expect do\n            base.validate!(:view, content, 'test')\n          end.to 
raise_error(ArgumentError)\n        end\n        it 'prevents multiple view definitions' do\n          content = <<~SQL\n            CREATE VIEW test AS;\n            CREATE VIEW other_test AS;\n          SQL\n\n          expect do\n            base.validate!(:view, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'requires OR REPLACE clause' do\n          content = <<~SQL\n            CREATE VIEW test AS;\n          SQL\n\n          expect do\n            base.validate!(:view, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'requires matching name' do\n          content = <<~SQL\n            CREATE OR REPLACE VIEW other_test AS;\n          SQL\n\n          expect do\n            base.validate!(:view, content, 'test')\n          end.to raise_error(ArgumentError)\n        end\n\n        it 'works when setup correctly' do\n          content = <<~SQL\n            CREATE OR REPLACE VIEW TEST AS;\n          SQL\n\n          expect { base.validate!(:view, content, 'test') }.not_to raise_error\n        end\n\n        it 'supports materialized views' do\n          content = <<~SQL\n            DROP MATERIALIZED VIEW IF EXISTS test;\n            CREATE MATERIALIZED VIEW test AS;\n          SQL\n\n          expect { base.validate!(:view, content, 'test') }.not_to raise_error\n        end\n\n        it 'supports name with schema' do\n          content = <<~SQL\n            CREATE OR REPLACE VIEW internal.test AS;\n          SQL\n\n          expect { base.validate!(:view, content, 'internal_test') }.not_to raise_error\n        end\n      end\n    end\n\n    context 'on running' do\n      let(:base) { Torque::PostgreSQL::VersionedCommands }\n      let(:sql) { 'CREATE TYPE test;' }\n      let(:command) do\n        base::CommandMigration.new('test.sql', 1, 'create', 'type', 'test', 1)\n      end\n\n      before do\n        allow_any_instance_of(ActiveRecord::Migration).to receive(:puts) # Disable 
messages\n\n        allow(File).to receive(:expand_path, &:itself)\n        allow(File).to receive(:read).with('test.sql').and_return(sql)\n\n        # Validations are better tested above\n        allow(base).to receive(:validate!).and_return(true)\n      end\n\n      it 'has the right name' do\n        expect(command.name).to eq('create_type_test_v1')\n      end\n\n      it 'creates the type properly' do\n        expect(connection).to receive(:execute).with(sql)\n        command.migrate(:up)\n      end\n\n      it 'reverts to the previous file' do\n        sql2 = 'CREATE TYPE test_v1;'\n        command.op_version = 2\n        expect(base).to receive(:fetch_command).with(Array, 'type', 'test', 1).and_return(sql2)\n        expect(connection).to receive(:execute).with(sql2)\n        command.migrate(:down)\n      end\n\n      it 'reverts to the same version when reverting a remove' do\n        command.op = 'remove'\n        command.op_version = 2\n        expect(base).to receive(:fetch_command).with(Array, 'type', 'test', 2).and_return(sql)\n        expect(connection).to receive(:execute).with(sql)\n        command.migrate(:down)\n      end\n\n      it 'properly drops functions' do\n        command.type = 'function'\n\n        sql.replace('CREATE FUNCTION test;')\n        expect(connection).to receive(:execute).with('DROP FUNCTION test;')\n        command.migrate(:down)\n\n        sql.replace('CREATE FUNCTION test();')\n        expect(connection).to receive(:execute).with('DROP FUNCTION test();')\n        command.migrate(:down)\n\n        sql.replace('CREATE FUNCTION test(int); CREATE FUNCTION test(float);')\n        expect(connection).to receive(:execute).with('DROP FUNCTION test(int), test(float);')\n        command.migrate(:down)\n      end\n\n      it 'properly drops types' do\n        command.type = 'type'\n\n        sql.replace('CREATE TYPE test;')\n        expect(connection).to receive(:execute).with('DROP TYPE test;')\n        command.migrate(:down)\n      
end\n\n      it 'properly drops views' do\n        command.type = 'view'\n\n        sql.replace('CREATE VIEW test AS SELECT 1;')\n        expect(connection).to receive(:execute).with('DROP VIEW test;')\n        command.migrate(:down)\n\n        sql.replace('CREATE MATERIALIZED VIEW test AS SELECT 1;')\n        expect(connection).to receive(:execute).with('DROP MATERIALIZED VIEW test;')\n        command.migrate(:down)\n\n        sql.replace('CREATE RECURSIVE VIEW test AS SELECT 1;')\n        expect(connection).to receive(:execute).with('DROP VIEW test;')\n        command.migrate(:down)\n      end\n    end\n\n    context 'on migrator' do\n      let(:base) { Torque::PostgreSQL::VersionedCommands }\n      let(:table) { base::SchemaTable.new(connection.pool) }\n      let(:context) { connection.pool.migration_context }\n      let(:versions) { migrations.map(&:version).map(&:to_i) }\n      let(:migrations) { [ActiveRecord::Migration.new('base', 1)] }\n\n      before do\n        allow_any_instance_of(ActiveRecord::Migration).to receive(:puts) # Disable messages\n        allow(File).to receive(:expand_path, &:itself)\n\n        # Validations are better tested above\n        allow(base).to receive(:validate!).and_return(true)\n        allow(context).to receive(:migrations).and_return(migrations)\n        allow(context.schema_migration).to receive(:integer_versions).and_return(versions)\n      end\n\n      it 'expect the table to not exist by default' do\n        expect(table.table_exists?).to be_falsey\n      end\n\n      it 'creates the table on first migration' do\n        migration('CREATE TYPE test;')\n\n        expect(table.table_exists?).to be_falsey\n        context.up(2)\n        expect(table.table_exists?).to be_truthy\n        expect(table.count).to eq(1)\n        expect(table.versions_of('type')).to eq([['test_2', 1]])\n      end\n\n      it 'drops the table if all versions are removed' do\n        migrations << ActiveRecord::Migration.new('other', 2)\n        
versions << 2\n\n        migration('CREATE TYPE test;')\n\n        expect(table.table_exists?).to be_falsey\n        context.up(3)\n        expect(table.table_exists?).to be_truthy\n        expect(table.count).to eq(1)\n\n        versions << 3\n        context.down(2)\n        expect(table.table_exists?).to be_falsey\n        expect(table.count).to eq(0)\n      end\n\n      it 'does not drop the table if there are still records' do\n        migration('CREATE TYPE test;')\n        migration('CREATE TYPE other;')\n\n        expect(table.table_exists?).to be_falsey\n        context.up(3)\n        expect(table.table_exists?).to be_truthy\n        expect(table.count).to eq(2)\n\n        versions << 2\n        versions << 3\n        context.down(2)\n        expect(table.table_exists?).to be_truthy\n        expect(table.count).to eq(1)\n      end\n\n      def migration(command)\n        version = migrations.size + 1\n        file = \"test_#{version}.sql\"\n        name = file.split('.').first\n        allow(File).to receive(:read).with(file).and_return(command)\n        migrations << base::CommandMigration.new(file, version, 'create', 'type', name, 1)\n      end\n    end\n  end\n\n  context 'on schema dumper' do\n    let(:source) { ActiveRecord::Base.connection_pool }\n    let(:schema_table) { double(commands_table.name) }\n    let(:commands_table) { Torque::PostgreSQL::VersionedCommands::SchemaTable }\n    let(:dump_result) do\n      ActiveRecord::SchemaDumper.dump(source, (dump_result = StringIO.new))\n      dump_result.string\n    end\n\n    before do\n      allow(commands_table).to receive(:new).and_return(schema_table)\n      allow(schema_table).to receive(:versions_of).and_return([])\n      allow(schema_table).to receive(:table_name).and_return('versioned_commands_tbl')\n    end\n\n    it 'does not include versioned commands info by default' do\n      expect(dump_result).not_to include('\"versioned_commands_tbl\"')\n      expect(dump_result).not_to include('# These 
are types managed by versioned commands')\n      expect(dump_result).not_to include('# These are functions managed by versioned commands')\n      expect(dump_result).not_to include('# These are views managed by versioned commands')\n    end\n\n    it 'includes all types' do\n      connection.execute('CREATE TYPE test;')\n      connection.execute('CREATE TYPE internal.other;')\n\n      allow(schema_table).to receive(:versions_of).with('type').and_return([\n        ['test', 1],\n        ['internal_other', 2],\n        ['remove', 1],\n      ])\n\n      expect(dump_result).to include('# These are types managed by versioned commands')\n      expect(dump_result).to include('create_type \"test\", version: 1')\n      expect(dump_result).to include('create_type \"internal_other\", version: 2')\n      expect(dump_result).not_to include('create_type \"removed\", version: 1')\n    end\n\n    it 'includes all functions' do\n      body = 'RETURNS void AS $$ BEGIN NULL; END; $$ LANGUAGE plpgsql'\n      connection.execute(\"CREATE FUNCTION test() #{body};\")\n      connection.execute(\"CREATE FUNCTION internal.other() #{body};\")\n\n      allow(schema_table).to receive(:versions_of).with('function').and_return([\n        ['test', 1],\n        ['internal_other', 2],\n        ['remove', 1],\n      ])\n\n      expect(dump_result).to include('# These are functions managed by versioned commands')\n      expect(dump_result).to include('create_function \"test\", version: 1')\n      expect(dump_result).to include('create_function \"internal_other\", version: 2')\n      expect(dump_result).not_to include('create_function \"removed\", version: 1')\n    end\n\n    it 'includes all views' do\n      connection.execute('CREATE VIEW test AS SELECT 1;')\n      connection.execute('CREATE MATERIALIZED VIEW internal.other AS SELECT 2;')\n\n      allow(schema_table).to receive(:versions_of).with('view').and_return([\n        ['test', 1],\n        ['internal_other', 2],\n        ['remove', 1],\n      
])\n\n      expect(dump_result).to include('# These are views managed by versioned commands')\n      expect(dump_result).to include('create_view \"test\", version: 1')\n      expect(dump_result).to include('create_view \"internal_other\", version: 2')\n      expect(dump_result).not_to include('create_view \"removed\", version: 1')\n    end\n  end\nend\n"
  },
  {
    "path": "torque_postgresql.gemspec",
    "content": "$:.push File.expand_path('../lib', __FILE__)\n\n# Maintain your gem's version:\nrequire 'torque/postgresql/version'\nrequire 'date'\n\n# Describe your gem and declare its dependencies:\nGem::Specification.new do |s|\n  s.name        = 'torque-postgresql'\n  s.version     = Torque::PostgreSQL::VERSION\n  s.date        = Date.today.to_s\n  s.authors     = ['Carlos Silva']\n  s.email       = ['me@carlosfsilva.com']\n  s.homepage    = 'https://github.com/crashtech/torque-postgresql'\n  s.summary     = 'ActiveRecord extension to access PostgreSQL advanced resources'\n  s.description = 'Add support to complex resources of PostgreSQL, like data types, array associations, auxiliary statements (CTE), and full-text search (FTS)'\n  s.license     = 'MIT'\n  s.metadata    = {\n    'homepage_uri'    => 'https://torque.dev/postgresql',\n    \"source_code_uri\" => 'https://github.com/crashtech/torque-postgresql',\n    'bug_tracker_uri' => 'https://github.com/crashtech/torque-postgresql/issues',\n    'changelog_uri'   => 'https://github.com/crashtech/torque-postgresql/releases',\n  }\n\n  s.require_paths = ['lib']\n\n  s.files        = Dir['MIT-LICENSE', 'README.rdoc', 'lib/**/*', 'Rakefile']\n  s.test_files   = Dir['spec/**/*']\n  s.rdoc_options = ['--title', 'Torque PostgreSQL']\n\n  s.required_ruby_version     = '>= 3.2'\n  s.required_rubygems_version = '>= 1.8.11'\n\n  s.add_dependency 'rails', '~> 8.0'\n  s.add_dependency 'pg', '>= 1.2'\n\n  s.add_development_dependency 'rake', '~> 13.0'\n  s.add_development_dependency 'database_cleaner', '~> 2.0'\n  s.add_development_dependency 'dotenv', '~> 3.1'\n  s.add_development_dependency 'rspec', '~> 3.5'\n\n  s.add_development_dependency 'factory_bot', '~> 6.2'\n  s.add_development_dependency 'faker', '~> 3.5'\nend\n"
  }
]