Repository: crashtech/torque-postgresql
Branch: master
Commit: c654b22bf46b
Files: 173
Total size: 520.9 KB
Directory structure:
gitextract_u1kant5h/
├── .circleci/
│ └── config.yml
├── .github/
│ └── FUNDING.yml
├── .gitignore
├── .rspec
├── Gemfile
├── MIT-LICENSE
├── README.md
├── README.rdoc
├── Rakefile
├── gemfiles/
│ └── Gemfile.rails-8.0
├── lib/
│ ├── generators/
│ │ └── torque/
│ │ ├── function_generator.rb
│ │ ├── templates/
│ │ │ ├── function.sql.erb
│ │ │ ├── type.sql.erb
│ │ │ └── view.sql.erb
│ │ ├── type_generator.rb
│ │ └── view_generator.rb
│ ├── torque/
│ │ ├── postgresql/
│ │ │ ├── adapter/
│ │ │ │ ├── database_statements.rb
│ │ │ │ ├── oid/
│ │ │ │ │ ├── array.rb
│ │ │ │ │ ├── box.rb
│ │ │ │ │ ├── circle.rb
│ │ │ │ │ ├── enum.rb
│ │ │ │ │ ├── enum_set.rb
│ │ │ │ │ ├── interval.rb
│ │ │ │ │ ├── line.rb
│ │ │ │ │ ├── range.rb
│ │ │ │ │ └── segment.rb
│ │ │ │ ├── oid.rb
│ │ │ │ ├── quoting.rb
│ │ │ │ ├── schema_creation.rb
│ │ │ │ ├── schema_definitions.rb
│ │ │ │ ├── schema_dumper.rb
│ │ │ │ ├── schema_overrides.rb
│ │ │ │ └── schema_statements.rb
│ │ │ ├── adapter.rb
│ │ │ ├── arel/
│ │ │ │ ├── infix_operation.rb
│ │ │ │ ├── join_source.rb
│ │ │ │ ├── nodes.rb
│ │ │ │ ├── operations.rb
│ │ │ │ ├── select_manager.rb
│ │ │ │ └── visitors.rb
│ │ │ ├── arel.rb
│ │ │ ├── associations/
│ │ │ │ ├── association_scope.rb
│ │ │ │ ├── belongs_to_many_association.rb
│ │ │ │ ├── builder/
│ │ │ │ │ ├── belongs_to_many.rb
│ │ │ │ │ └── has_many.rb
│ │ │ │ ├── builder.rb
│ │ │ │ ├── foreign_association.rb
│ │ │ │ ├── preloader/
│ │ │ │ │ ├── association.rb
│ │ │ │ │ └── loader_query.rb
│ │ │ │ └── preloader.rb
│ │ │ ├── associations.rb
│ │ │ ├── attributes/
│ │ │ │ ├── builder/
│ │ │ │ │ ├── enum.rb
│ │ │ │ │ ├── full_text_search.rb
│ │ │ │ │ └── period.rb
│ │ │ │ ├── builder.rb
│ │ │ │ ├── enum.rb
│ │ │ │ ├── enum_set.rb
│ │ │ │ ├── full_text_search.rb
│ │ │ │ ├── lazy.rb
│ │ │ │ └── period.rb
│ │ │ ├── attributes.rb
│ │ │ ├── autosave_association.rb
│ │ │ ├── auxiliary_statement/
│ │ │ │ ├── recursive.rb
│ │ │ │ └── settings.rb
│ │ │ ├── auxiliary_statement.rb
│ │ │ ├── base.rb
│ │ │ ├── collector.rb
│ │ │ ├── config.rb
│ │ │ ├── function.rb
│ │ │ ├── geometry_builder.rb
│ │ │ ├── i18n.rb
│ │ │ ├── inheritance.rb
│ │ │ ├── insert_all.rb
│ │ │ ├── migration/
│ │ │ │ └── command_recorder.rb
│ │ │ ├── migration.rb
│ │ │ ├── predicate_builder/
│ │ │ │ ├── arel_attribute_handler.rb
│ │ │ │ ├── array_handler.rb
│ │ │ │ ├── enumerator_lazy_handler.rb
│ │ │ │ └── regexp_handler.rb
│ │ │ ├── predicate_builder.rb
│ │ │ ├── railtie.rb
│ │ │ ├── reflection/
│ │ │ │ ├── abstract_reflection.rb
│ │ │ │ ├── association_reflection.rb
│ │ │ │ ├── belongs_to_many_reflection.rb
│ │ │ │ ├── has_many_reflection.rb
│ │ │ │ ├── runtime_reflection.rb
│ │ │ │ └── through_reflection.rb
│ │ │ ├── reflection.rb
│ │ │ ├── relation/
│ │ │ │ ├── auxiliary_statement.rb
│ │ │ │ ├── buckets.rb
│ │ │ │ ├── distinct_on.rb
│ │ │ │ ├── inheritance.rb
│ │ │ │ ├── join_series.rb
│ │ │ │ └── merger.rb
│ │ │ ├── relation.rb
│ │ │ ├── schema_cache/
│ │ │ │ ├── bound_schema_reflection.rb
│ │ │ │ ├── inheritance.rb
│ │ │ │ └── schema_reflection.rb
│ │ │ ├── schema_cache.rb
│ │ │ ├── table_name.rb
│ │ │ ├── version.rb
│ │ │ ├── versioned_commands/
│ │ │ │ ├── command_migration.rb
│ │ │ │ ├── generator.rb
│ │ │ │ ├── migration_context.rb
│ │ │ │ ├── migrator.rb
│ │ │ │ └── schema_table.rb
│ │ │ └── versioned_commands.rb
│ │ └── postgresql.rb
│ └── torque-postgresql.rb
├── spec/
│ ├── en.yml
│ ├── factories/
│ │ ├── authors.rb
│ │ ├── comments.rb
│ │ ├── item.rb
│ │ ├── posts.rb
│ │ ├── tags.rb
│ │ ├── texts.rb
│ │ ├── users.rb
│ │ └── videos.rb
│ ├── fixtures/
│ │ └── migrations/
│ │ ├── 20250101000001_create_users.rb
│ │ ├── 20250101000002_create_function_count_users_v1.sql
│ │ ├── 20250101000003_create_internal_users.rb
│ │ ├── 20250101000004_update_function_count_users_v2.sql
│ │ ├── 20250101000005_create_view_all_users_v1.sql
│ │ ├── 20250101000006_create_type_user_id_v1.sql
│ │ └── 20250101000007_remove_function_count_users_v2.sql
│ ├── initialize.rb
│ ├── mocks/
│ │ ├── cache_query.rb
│ │ └── create_table.rb
│ ├── models/
│ │ ├── activity.rb
│ │ ├── activity_book.rb
│ │ ├── activity_post/
│ │ │ └── sample.rb
│ │ ├── activity_post.rb
│ │ ├── author.rb
│ │ ├── author_journalist.rb
│ │ ├── category.rb
│ │ ├── comment.rb
│ │ ├── course.rb
│ │ ├── geometry.rb
│ │ ├── guest_comment.rb
│ │ ├── internal/
│ │ │ └── user.rb
│ │ ├── item.rb
│ │ ├── post.rb
│ │ ├── question.rb
│ │ ├── question_select.rb
│ │ ├── tag.rb
│ │ ├── text.rb
│ │ ├── time_keeper.rb
│ │ ├── user.rb
│ │ └── video.rb
│ ├── schema.rb
│ ├── spec_helper.rb
│ └── tests/
│ ├── arel_spec.rb
│ ├── auxiliary_statement_spec.rb
│ ├── belongs_to_many_spec.rb
│ ├── collector_spec.rb
│ ├── distinct_on_spec.rb
│ ├── enum_set_spec.rb
│ ├── enum_spec.rb
│ ├── full_text_seach_test.rb
│ ├── function_spec.rb
│ ├── geometric_builder_spec.rb
│ ├── has_many_spec.rb
│ ├── insert_all_spec.rb
│ ├── interval_spec.rb
│ ├── lazy_spec.rb
│ ├── period_spec.rb
│ ├── predicate_builder_spec.rb
│ ├── quoting_spec.rb
│ ├── relation_spec.rb
│ ├── schema_spec.rb
│ ├── table_inheritance_spec.rb
│ └── versioned_commands_spec.rb
└── torque_postgresql.gemspec
================================================
FILE CONTENTS
================================================
================================================
FILE: .circleci/config.yml
================================================
version: 2.1
orbs:
ruby: circleci/ruby@1.4.0
jobs:
test:
parallelism: 3
parameters:
ruby-version:
type: string
bundle-version:
type: string
docker:
- image: cimg/ruby:<< parameters.ruby-version >>
- image: cimg/postgres:14.6
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: torque
POSTGRES_DB: torque_postgresql
steps:
- checkout
- run: ruby --version
- run:
command: 'bundle install --gemfile gemfiles/<< parameters.bundle-version >>'
name: Install Bundle
- run:
command: dockerize -wait tcp://localhost:5432 -timeout 1m
name: Wait for DB
- run:
command: 'bundle exec --gemfile gemfiles/<< parameters.bundle-version >> rspec'
name: Run Tests
environment:
DATABASE_URL: 'postgresql://postgres:torque@localhost/torque_postgresql'
references:
matrix_build: &matrix_build
test:
matrix:
parameters:
ruby-version: ['3.2', '3.3', '3.4']
bundle-version: ['Gemfile.rails-8.0']
workflows:
commit:
jobs:
- <<: *matrix_build
================================================
FILE: .github/FUNDING.yml
================================================
# These are supported funding model platforms
github: ['crashtech']
# patreon: # Replace with a single Patreon username
# open_collective: # Replace with a single Open Collective username
# ko_fi: # Replace with a single Ko-fi username
# tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
# community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
# liberapay: # Replace with a single Liberapay username
# issuehunt: # Replace with a single IssueHunt username
# lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
# polar: # Replace with a single Polar username
# buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
# thanks_dev: # Replace with a single thanks.dev username
# custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
================================================
FILE: .gitignore
================================================
.env
*.gem
*.rbc
.bundle
.config
.yardoc
.byebug_history
.versions.conf
Gemfile.lock
coverage
doc/
.config
coverage/
InstalledFiles
pkg/
rdoc/
spec/reports/
spec/examples.txt
test/tmp/
test/version_tmp/
tmp/
bin/
.ruby-version
.ruby-gemset
gemfiles/*.lock
================================================
FILE: .rspec
================================================
--color
--require spec_helper
================================================
FILE: Gemfile
================================================
source 'https://rubygems.org'
# Declare your gem's dependencies in torque_postgresql.gemspec.
# Bundler will treat runtime dependencies like base dependencies, and
# development dependencies will be added by default to the :development group.
gemspec
# Declare any dependencies that are still in development here instead of in
# your gemspec. These might include edge Rails or gems from your path or
# Git. Remember to move these dependencies to your gemspec before releasing
# your gem to rubygems.org.
# To use a debugger
gem 'debug'
# Optional dependencies
gem 'annotate'
================================================
FILE: MIT-LICENSE
================================================
Copyright 2016 Carlos Silva
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
================================================
FILE: README.md
================================================
[](https://circleci.com/gh/crashtech/torque-postgresql/tree/master)
[](https://codeclimate.com/github/crashtech/torque-postgresql)
[](https://badge.fury.io/rb/torque-postgresql)
* [Wiki](https://github.com/crashtech/torque-postgresql/wiki)
* [Bugs](https://github.com/crashtech/torque-postgresql/issues)
* [TODO](https://github.com/crashtech/torque-postgresql/wiki/TODO)
# Description
`torque-postgresql` is a plugin that enhances Ruby on Rails enabling easy access to existing PostgreSQL advanced resources, such as data types and query statements. Its features are designed to be similar to Rails architecture and work as smoothly as possible.
Fully compatible with `schema.rb` and 100% plug-and-play, with optional configurations, so that it can be adapted to your project's design pattern.
# Installation
To install torque-postgresql you need to add the following to your Gemfile:
```ruby
gem 'torque-postgresql', '~> 2.0' # For Rails >= 6.0 < 6.1
gem 'torque-postgresql', '~> 2.0.4' # For Rails >= 6.1
gem 'torque-postgresql', '~> 3.0' # For Rails >= 7.0 < 7.1
gem 'torque-postgresql', '~> 3.3' # For Rails >= 7.1 < 7.2
gem 'torque-postgresql', '~> 3.4' # For Rails >= 7.2 < 8.0
gem 'torque-postgresql', '~> 4.0' # For Rails >= 8.0
```
Also, run:
```
$ bundle
```
Or, for non-Gemfile related usage, simply:
```
gem install torque-postgresql
```
# Usage
These are the currently available features:
* [Configuring](https://github.com/crashtech/torque-postgresql/wiki/Configuring)
## Data types
* [Box](https://github.com/crashtech/torque-postgresql/wiki/Box)
* [Circle](https://github.com/crashtech/torque-postgresql/wiki/Circle)
* [Date/Time Range](https://github.com/crashtech/torque-postgresql/wiki/Date-Time-Range)
* [Enum](https://github.com/crashtech/torque-postgresql/wiki/Enum)
* [EnumSet](https://github.com/crashtech/torque-postgresql/wiki/Enum-Set)
* [Interval](https://github.com/crashtech/torque-postgresql/wiki/Interval)
* [Line](https://github.com/crashtech/torque-postgresql/wiki/Line)
* [Segment](https://github.com/crashtech/torque-postgresql/wiki/Segment)
## Querying
* [Arel](https://github.com/crashtech/torque-postgresql/wiki/Arel)
* [Auxiliary Statements](https://github.com/crashtech/torque-postgresql/wiki/Auxiliary-Statements)
* [Belongs to Many](https://github.com/crashtech/torque-postgresql/wiki/Belongs-to-Many)
* [Distinct On](https://github.com/crashtech/torque-postgresql/wiki/Distinct-On)
* [Dynamic Attributes](https://github.com/crashtech/torque-postgresql/wiki/Dynamic-Attributes)
* [Has Many](https://github.com/crashtech/torque-postgresql/wiki/Has-Many)
* [Inherited Tables](https://github.com/crashtech/torque-postgresql/wiki/Inherited-Tables)
* [Insert All](https://github.com/crashtech/torque-postgresql/wiki/Insert-All)
* [Multiple Schemas](https://github.com/crashtech/torque-postgresql/wiki/Multiple-Schemas)
* [Predicate Builder](https://github.com/crashtech/torque-postgresql/wiki/Predicate-Builder)
* [Full‐Text Search](https://github.com/crashtech/torque-postgresql/wiki/Full‐Text-Search)
* [Join Series](https://github.com/crashtech/torque-postgresql/wiki/Join-Series)
* [Buckets](https://github.com/crashtech/torque-postgresql/wiki/Buckets)
* [Versioned Commands (Views, Functions, Types)](https://github.com/crashtech/torque-postgresql/wiki/Versioned-Commands)
# How to Contribute
To start, simply fork the project, create a `.env` file following this example:
```
DATABASE_URL="postgres://USER:PASSWORD@localhost/DATABASE"
```
Run local tests using:
```
$ bundle install
$ bundle exec rake spec
```
Finally, fix and send a pull request.
## License
Copyright © 2017- Carlos Silva. See [The MIT License](MIT-LICENSE) for further details.
================================================
FILE: README.rdoc
================================================
= Torque PostgreSQL -- Add support to complex resources of PostgreSQL, like data
types, user-defined types and auxiliary statements (CTE)
This is a plugin that enhances Ruby on Rails, enabling easy access to existing
advanced PostgreSQL resources, such as data types and query statements. Its
features are designed to be similar to the Rails architecture and to work as
smoothly as possible.
100% plug-and-play, with optional configurations, so that it can be adapted to
your project's design pattern.
A short rundown of some of the major features:
* Enum type manager
It creates a separate class to hold each enum set that can be used by multiple
models, and it also keeps the database consistent. The enum type is known to have
better performance than string- and integer-like enums.
{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/static/datatype-enum.html]
create_enum :roles, %i(visitor manager admin)
add_column :users, :role, :roles
Enum::Roles.admin
Users.roles
{Learn more}[link:classes/Torque/PostgreSQL/Attributes/Enum.html]
* Enum set type manager
The enum type is known to have better performance than string- and integer-
like enums. Now, with the array option, which behaves like a binary assignment,
each record can have multiple enum values.
{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/static/datatype-enum.html]
create_enum :permissions, %i(read write exec)
add_column :posts, :creator_permissions, :permissions, array: true
Enum::PermissionsSet.new(3) # [:read, :write]
post.creator_permissions.write?
{Learn more}[link:classes/Torque/PostgreSQL/Attributes/EnumSet.html]
* Period complex queries
This provides extended and complex calculations over date and time ranges. In a
few words, you can now store `start_time` and `finish_time` in the same column
and rely on the methods provided here to do your magic.
{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/functions-range.html]
add_column :events, :period, :tsrange
add_column :events, :interval, :interval
Event.create(title: 'Test', period: ['2019-01-01 12:00:00', '2019-01-01 14:00:00'], interval: 15.minutes)
Event.overlapping('2019-01-01 13:00:00', '2019-01-01 15:00:00').count
Event.not_real_overlapping('2019-01-01 11:00:00', '2019-01-01 13:00:00').empty?
{Learn more}[link:classes/Torque/PostgreSQL/Attributes/Builder/Period.html]
* Has many array association
The idea is simple: one table stores all the ids, and the other one `has many`
records on that table because its record ids exist in the array column.
Like: `Tag has many Videos connected through an array`.
{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/arrays.html]
add_column :videos, :tag_ids, :bigint, array: true
Tag.has_many :videos, array: true
Tag.videos.size
Tag.videos << another_video
{Learn more}[link:classes/Torque/PostgreSQL/Reflection/AbstractReflection.html]
* Belongs to many association
The original `belongs_to` associations define a `SingularAssociation`, which
means that it could be extended with `array: true`. In this case, I decided to
create my own `CollectionAssociation` called `belongs_to_many`, which behaves
similar to the single one, but storing and returning a list of records.
With this, now you can say things like `Project belongs to many employees`,
which is more syntactically correct than `Project has many employees`
{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/arrays.html]
add_column :videos, :tag_ids, :bigint, array: true
Video.belongs_to_many :tags
Video.tags.size
Video.tags << Tag.new(title: 'rails')
{Learn more}[link:classes/Torque/PostgreSQL/Reflection/BelongsToManyReflection.html]
* Distinct On
MySQL-like group by statement on queries. It keeps only the first row of each
set of rows where the given expressions evaluate to equal.
{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/static/sql-select.html#SQL-DISTINCT]
User.distinct_on(:name).all
{Learn more}[link:classes/Torque/PostgreSQL/Relation/DistinctOn.html]
* Auxiliary Statements
Provides a way to write auxiliary statements for use in a larger query. It's
reconfigured on the model, and then can be used during querying process.
{PostgreSQL Docs}[https://www.postgresql.org/docs/9.6/static/queries-with.html]
class User < ActiveRecord::Base
auxiliary_statement :last_comment do |cte|
cte.query Comment.distinct_on(:user_id).order(:user_id, id: :desc)
cte.attributes content: :last_comment_content
end
end
user = User.with(:last_comment).first
{Learn more}[link:classes/Torque/PostgreSQL/AuxiliaryStatement.html]
* Multiple Schemas
Allows models and modules to have a schema associated with them, so that
developers can better organize their tables into schemas and build features in
a way that the database can better represent how they are separated.
create_schema "internal", force: :cascade
module Internal
class User < ActiveRecord::Base
self.schema = 'internal'
end
end
Internal::User.all
{Learn more}[link:classes/Torque/PostgreSQL/Adapter/DatabaseStatements.html]
== Download and installation
The latest version of Torque PostgreSQL can be installed with RubyGems:
$ gem install torque-postgresql
Source code can be downloaded direct from the GitHub repository:
* https://github.com/crashtech/torque-postgresql
== License
Torque PostgreSQL is released under the MIT license:
* http://www.opensource.org/licenses/MIT
================================================
FILE: Rakefile
================================================
# Load bundler so gem dependencies resolve; degrade gracefully when missing.
begin
  require 'bundler/setup'
rescue LoadError
  puts 'You must `gem install bundler` and `bundle install` to run rake tasks'
end

require 'rdoc/task'

# `rake rdoc` — generates the API documentation under ./rdoc from the
# README and everything inside lib/.
RDoc::Task.new(:rdoc) do |rdoc|
  rdoc.rdoc_dir = 'rdoc'
  rdoc.title = 'Torque::Postgresql'
  rdoc.options << '--line-numbers'
  rdoc.rdoc_files.include('README.rdoc')
  rdoc.rdoc_files.include('lib/**/*.rb')
end

desc 'Initialize the local environment'
task :environment do |t|
  # Put lib/ and spec/ on the load path so `require 'spec_helper'` and the
  # gem's own files resolve without installation.
  lib = File.expand_path('../lib', __FILE__)
  spec = File.expand_path('../spec', __FILE__)
  $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
  $LOAD_PATH.unshift(spec) unless $LOAD_PATH.include?(spec)
end

desc 'Prints a schema dump of the test database'
task dump: :environment do |t|
  require 'byebug'
  require 'spec_helper'
  ActiveRecord::SchemaDumper.dump
end

require 'rspec/core/rake_task'
RSpec::Core::RakeTask.new(:spec)

# Running a bare `rake` executes the spec suite.
task default: :spec
================================================
FILE: gemfiles/Gemfile.rails-8.0
================================================
source 'https://rubygems.org'
gem 'rails', '~> 8.0', '< 8.1'
gem 'pg', '~> 1.4.0'
gemspec path: "../"
================================================
FILE: lib/generators/torque/function_generator.rb
================================================
# frozen_string_literal: true

require 'torque/postgresql/versioned_commands/generator'

module Torque
  module Generators
    # Rails generator that creates a versioned SQL function migration file.
    # All of the behavior comes from the shared VersionedCommands::Generator
    # mixin; only the public method name is specialized here.
    class FunctionGenerator < Rails::Generators::Base
      include Torque::PostgreSQL::VersionedCommands::Generator

      alias create_function_file create_migration_file
    end
  end
end
================================================
FILE: lib/generators/torque/templates/function.sql.erb
================================================
CREATE OR REPLACE FUNCTION <%= name %>()
RETURNS void AS $$
-- Function body goes here
$$ LANGUAGE sql;
================================================
FILE: lib/generators/torque/templates/type.sql.erb
================================================
DROP TYPE IF EXISTS <%= name %>;
CREATE TYPE <%= name %>;
================================================
FILE: lib/generators/torque/templates/view.sql.erb
================================================
<%= "DROP MATERIALIZED VIEW IF EXISTS #{name};\n" if options[:materialized] %>CREATE <%= options[:materialized] ? 'MATERIALIZED' : 'OR REPLACE' %> VIEW <%= name %> AS (
-- View body goes here
);
================================================
FILE: lib/generators/torque/type_generator.rb
================================================
# frozen_string_literal: true

require 'torque/postgresql/versioned_commands/generator'

module Torque
  module Generators
    # Rails generator that creates a versioned SQL type migration file.
    # All of the behavior comes from the shared VersionedCommands::Generator
    # mixin; only the public method name is specialized here.
    class TypeGenerator < Rails::Generators::Base
      include Torque::PostgreSQL::VersionedCommands::Generator

      alias create_type_file create_migration_file
    end
  end
end
================================================
FILE: lib/generators/torque/view_generator.rb
================================================
# frozen_string_literal: true

require 'torque/postgresql/versioned_commands/generator'

module Torque
  module Generators
    # Rails generator that creates a versioned SQL view migration file,
    # optionally as a materialized view (used by the view.sql.erb template).
    class ViewGenerator < Rails::Generators::Base
      include Torque::PostgreSQL::VersionedCommands::Generator

      # NOTE(review): `aliases: %i(--m)` produces the symbol :"--m"; Thor
      # option aliases are conventionally short flags such as '-m' — confirm
      # this spelling is intentional.
      class_option :materialized, type: :boolean, aliases: %i(--m), default: false,
        desc: 'Use materialized view instead of regular view'

      alias create_view_file create_migration_file
    end
  end
end
================================================
FILE: lib/torque/postgresql/adapter/database_statements.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    module Adapter
      # Adapter mixin adding schema/type introspection helpers and support
      # for the extended types and versioned commands this gem provides.
      module DatabaseStatements

        # Types this adapter supports beyond the stock PostgreSQL adapter.
        EXTENDED_DATABASE_TYPES = %i[enum enum_set interval]

        # Switch between dump mode or not
        # NOTE(review): `!!!value` is equivalent to `!value`, so this just
        # toggles the flag (nil/false -> true, true -> false). The flag is
        # read by +column_definitions+ below to restrict to local columns.
        def dump_mode!
          @_dump_mode = !!!@_dump_mode
        end

        # List of schemas blocked by the application in the current connection
        # (global config merged with per-connection config).
        def schemas_blacklist
          @schemas_blacklist ||= Torque::PostgreSQL.config.schemas.blacklist +
            (@config.dig(:schemas, 'blacklist') || [])
        end

        # List of schemas used by the application in the current connection
        # (global config merged with per-connection config).
        def schemas_whitelist
          @schemas_whitelist ||= Torque::PostgreSQL.config.schemas.whitelist +
            (@config.dig(:schemas, 'whitelist') || [])
        end

        # A list of schemas on the search path sanitized, with the special
        # '"$user"' entry replaced by the actual database user name.
        # NOTE(review): if none of username/USER/USERNAME is set, +db_user+
        # is nil and String#sub raises TypeError — confirm that cannot occur.
        def schemas_search_path_sanitized
          @schemas_search_path_sanitized ||= begin
            db_user = @config[:username] || ENV['USER'] || ENV['USERNAME']
            schema_search_path.split(',').map { |item| item.strip.sub('"$user"', db_user) }
          end
        end

        # Check if a given type is valid.
        def valid_type?(type)
          super || extended_types.include?(type)
        end

        # Get the list of extended types
        def extended_types
          EXTENDED_DATABASE_TYPES
        end

        # Checks if a given schema exists in the database. If +filtered+ is
        # given as false, then it will check regardless of whitelist and
        # blacklist
        def schema_exists?(name, filtered: true)
          return user_defined_schemas.include?(name.to_s) if filtered

          query_value(<<-SQL, "SCHEMA") == 1
            SELECT 1 FROM pg_catalog.pg_namespace WHERE nspname = #{quote(name)}
          SQL
        end

        # Returns true if type exists.
        def type_exists?(name)
          user_defined_types.key? name.to_s
        end
        alias data_type_exists? type_exists?

        # Change some of the types being mapped. Geometry and interval types
        # are only registered when enabled in the gem configuration.
        def initialize_type_map(m = type_map)
          super

          if PostgreSQL.config.geometry.enabled
            m.register_type 'box', OID::Box.new
            m.register_type 'circle', OID::Circle.new
            m.register_type 'line', OID::Line.new
            m.register_type 'segment', OID::Segment.new
          end

          if PostgreSQL.config.interval.enabled
            m.register_type 'interval', OID::Interval.new
          end
        end

        # :nodoc:
        def load_additional_types(oids = nil)
          # Treat reg* identifier types as plain strings on the Ruby side.
          type_map.alias_type 'regclass', 'varchar'
          type_map.alias_type 'regconfig', 'varchar'

          super
          torque_load_additional_types(oids)
        end

        # Add the composite types to be loaded too.
        def torque_load_additional_types(oids = nil)
          return unless torque_load_additional_types?

          # Types: (b)ase, (c)omposite, (d)omain, (e)num, (p)seudotype, (r)ange
          # (m)ultirange
          query = <<~SQL
            SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput,
                   r.rngsubtype, t.typtype, t.typbasetype, t.typarray
              FROM pg_type as t
              LEFT JOIN pg_range as r ON oid = rngtypid
              LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
             WHERE n.nspname NOT IN ('pg_catalog', 'information_schema')
          SQL

          # Either load the specific OIDs requested, or all enum types.
          if oids
            query += " AND t.oid IN (%s)" % oids.join(", ")
          else
            query += " AND t.typtype IN ('e')"
          end

          options = { allow_retry: true, materialize_transactions: false }
          internal_execute(query, 'SCHEMA', **options).each do |row|
            if row['typtype'] == 'e' && PostgreSQL.config.enum.enabled
              OID::Enum.create(row, type_map)
            end
          end
        end

        # Whether the extra type-loading pass should run at all.
        def torque_load_additional_types?
          PostgreSQL.config.enum.enabled
        end

        # Gets a list of user defined types.
        # You can even choose the +category+ filter
        def user_defined_types(*categories)
          categories = categories.compact.presence || %w[c e p r m]
          query(<<-SQL, 'SCHEMA').to_h
            SELECT t.typname, t.typtype
              FROM pg_type as t
              LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
             WHERE n.nspname NOT IN ('pg_catalog', 'information_schema')
               AND t.typtype IN ('#{categories.join("', '")}')
          SQL
        end

        # Get the list of inherited tables associated with their parent tables
        # as a hash of child table name => array of parent table names.
        def inherited_tables
          tables = query(<<-SQL, 'SCHEMA')
            SELECT inhrelid::regclass  AS table_name,
                   inhparent::regclass AS inheritances
              FROM pg_inherits
              JOIN pg_class parent ON pg_inherits.inhparent = parent.oid
              JOIN pg_class child  ON pg_inherits.inhrelid  = child.oid
             ORDER BY inhrelid
          SQL

          tables.each_with_object({}) do |(child, parent), result|
            (result[child] ||= []) << parent
          end
        end

        # Get the list of schemas that were created by the user
        def user_defined_schemas
          query_values(user_defined_schemas_sql, 'SCHEMA')
        end

        # Build the query for allowed schemas
        def user_defined_schemas_sql
          <<-SQL.squish
            SELECT nspname
              FROM pg_catalog.pg_namespace
             WHERE 1=1 AND #{filter_by_schema.join(' AND ')}
             ORDER BY oid
          SQL
        end

        # Get the list of columns, and their definition, but only from the
        # actual table, does not include columns that comes from inherited table
        def column_definitions(table_name)
          query(<<~SQL, "SCHEMA")
            SELECT a.attname, format_type(a.atttypid, a.atttypmod),
                   pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod,
                   c.collname, col_description(a.attrelid, a.attnum) AS comment,
                   #{supports_identity_columns? ? 'attidentity' : quote('')} AS identity,
                   #{supports_virtual_columns? ? 'attgenerated' : quote('')} as attgenerated
              FROM pg_attribute a
              LEFT JOIN pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum
              LEFT JOIN pg_type t ON a.atttypid = t.oid
              LEFT JOIN pg_collation c ON a.attcollation = c.oid AND a.attcollation <> t.typcollation
             WHERE a.attrelid = #{quote(quote_table_name(table_name))}::regclass
               AND a.attnum > 0 AND NOT a.attisdropped
               #{'AND a.attislocal' if @_dump_mode}
             ORDER BY a.attnum
          SQL
        end

        # Get all possible schema entries that can be created via versioned
        # commands of the provided type. Mostly for covering removals and not
        # dump them
        def list_versioned_commands(type)
          query =
            case type
            when :function
              <<-SQL.squish
                SELECT n.nspname AS schema, p.proname AS name
                  FROM pg_catalog.pg_proc p
                 INNER JOIN pg_namespace n ON n.oid = p.pronamespace
                 WHERE 1=1 AND #{filter_by_schema.join(' AND ')};
              SQL
            when :type
              <<-SQL.squish
                SELECT n.nspname AS schema, t.typname AS name
                  FROM pg_type t
                 INNER JOIN pg_namespace n ON n.oid = t.typnamespace
                 WHERE 1=1 AND t.typtype NOT IN ('e')
                   AND #{filter_by_schema.join(' AND ')};
              SQL
            when :view
              <<-SQL.squish
                SELECT n.nspname AS schema, c.relname AS name
                  FROM pg_class c
                 INNER JOIN pg_namespace n ON n.oid = c.relnamespace
                 WHERE 1=1 AND c.relkind IN ('v', 'm')
                   AND #{filter_by_schema.join(' AND ')};
              SQL
            end

          select_rows(query, 'SCHEMA')
        end

        # Build the condition for filtering by schema, honoring both the
        # blacklist and the whitelist (LIKE patterns are allowed in either).
        def filter_by_schema
          conditions = []
          conditions << <<-SQL.squish if schemas_blacklist.any?
            nspname NOT LIKE ALL (ARRAY['#{schemas_blacklist.join("', '")}'])
          SQL

          conditions << <<-SQL.squish if schemas_whitelist.any?
            nspname LIKE ANY (ARRAY['#{schemas_whitelist.join("', '")}'])
          SQL

          conditions
        end
      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/adapter/oid/array.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    module Adapter
      module OID
        # Patch for the stock PostgreSQL array type so that, when the
        # predicate builder is configured to handle array attributes, plain
        # equality is never forced and the builder stays in control.
        module Array
          def force_equality?(value)
            return false if PostgreSQL.config.predicate_builder.handle_array_attributes
            super
          end
        end

        ::ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Array.prepend(Array)
      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/adapter/oid/box.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    # Value object representing a rectangle through two opposite corners
    # (x1, y1) and (x2, y2).
    class Box < Struct.new(:x1, :y1, :x2, :y2)
      # All four corner points of the box, built with the configured point
      # class, ordered as (x1,y1), (x1,y2), (x2,y1), (x2,y2).
      def points
        builder = Torque::PostgreSQL.config.geometry.point_class
        [x1, x2].product([y1, y2]).map { |px, py| builder.new(px, py) }
      end
    end

    config.geometry.box_class ||= ::ActiveRecord.const_set('Box', Class.new(Box))

    module Adapter
      module OID
        # Type object that parses and formats PostgreSQL box values.
        class Box < Torque::PostgreSQL::GeometryBuilder

          PIECES = %i[x1 y1 x2 y2].freeze
          FORMATION = '((%s,%s),(%s,%s))'.freeze

        end
      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/adapter/oid/circle.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    # Value object representing a circle through its center (x, y) and
    # radius r.
    class Circle < Struct.new(:x, :y, :r)
      alias radius r
      alias radius= r=

      # The center as an instance of the configured point class.
      def center
        point_class.new(x, y)
      end

      # Accepts either a point-class instance or an indexable pair of
      # coordinates; only the first two entries of a collection are used.
      def center=(value)
        if value.is_a?(point_class)
          self.x = value.x
          self.y = value.y
        else
          pair = value[0..1]
          self.x = pair.first
          self.y = pair.last
        end
      end

      private

        def point_class
          Torque::PostgreSQL.config.geometry.point_class
        end
    end

    config.geometry.circle_class ||= ::ActiveRecord.const_set('Circle', Class.new(Circle))

    module Adapter
      module OID
        # Type object that parses and formats PostgreSQL circle values.
        class Circle < Torque::PostgreSQL::GeometryBuilder

          PIECES = %i[x y r].freeze
          FORMATION = '<(%s,%s),%s>'.freeze

        end
      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/adapter/oid/enum.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    module Adapter
      module OID
        # Type object for user-defined PostgreSQL enum types, backed by the
        # Attributes::Enum class looked up for each enum name.
        class Enum < ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Enum

          attr_reader :name, :klass, :set_klass, :enum_klass

          # Register both the enum type and its array (set) counterpart on
          # the given +type_map+, using the OIDs from the pg_type +row+.
          def self.create(row, type_map)
            name = row['typname']
            oid = row['oid'].to_i
            arr_oid = row['typarray'].to_i

            oid_klass = Enum.new(name)
            oid_set_klass = EnumSet.new(name, oid_klass.klass)
            oid_klass.instance_variable_set(:@set_klass, oid_set_klass)

            type_map.register_type(oid, oid_klass)
            type_map.register_type(arr_oid, oid_set_klass)
          end

          def initialize(name)
            @name = name
            @klass = Attributes::Enum.lookup(name)
            @enum_klass = self
          end

          def hash
            [self.class, name].hash
          end

          # Serialize to the enum label string; blank or invalid values
          # become nil (cast_value swallows EnumError).
          def serialize(value)
            return if value.blank?
            value = cast_value(value)
            value.to_s unless value.nil?
          end

          def assert_valid_value(value)
            cast_value(value)
          end

          # Always use symbol value for schema dumper
          def type_cast_for_schema(value)
            cast_value(value).to_sym.inspect
          end

          # Equality is defined with hash above so instances can be used as
          # hash keys by the type map.
          def ==(other)
            self.class == other.class &&
              other.klass == klass &&
              other.type == type
          end

          private

            # Cast into the enum attribute class; invalid labels yield nil.
            def cast_value(value)
              return if value.blank?
              return value if value.is_a?(@klass)
              @klass.new(value)
            rescue Attributes::Enum::EnumError
              nil
            end
        end
      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/adapter/oid/enum_set.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Adapter
module OID
class EnumSet < Enum
  # Array counterpart of an enum type; +name+ gains the '[]' suffix and
  # values are handled through the EnumSet attribute class.
  def initialize(name, enum_klass)
    @name = "#{name}[]"
    @klass = Attributes::EnumSet.lookup(name, enum_klass)
    @set_klass = self
    @enum_klass = enum_klass
  end

  # Reported the same way as the scalar enum type.
  def type
    :enum
  end

  # Strips the PG array braces and splits the labels before casting.
  def deserialize(value)
    return if value.blank?
    value = value[1..-2].split(',') if value.is_a?(String)
    cast_value(value)
  end

  # Renders the set back into a PG array literal, or nil when blank.
  def serialize(value)
    return if value.blank?
    items = cast_value(value)
    return if items.blank?
    '{' + items.map(&:to_s).join(',') + '}'
  end

  # Always use symbol values for schema dumper
  def type_cast_for_schema(value)
    cast_value(value).map(&:to_sym).inspect
  end

  private

    # Invalid sets are swallowed and become nil.
    def cast_value(value)
      return if value.blank?
      value.is_a?(@klass) ? value : @klass.new(value)
    rescue Attributes::EnumSet::EnumSetError
      nil
    end
end
end
end
end
end
================================================
FILE: lib/torque/postgresql/adapter/oid/interval.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Adapter
module OID
class Interval < ActiveModel::Type::Value
  # Duration units from largest to smallest; used to decompose numeric
  # values and to label array entries (which are padded to the left).
  CAST_PARTS = %i[years months days hours minutes seconds].freeze

  def type
    :interval
  end

  # Accepts database-style string, numeric as seconds, array of parts
  # padded to left, or a hash
  #
  # Examples:
  # [12, 0, 0]
  # produces: 12 hours, 0 minutes, and 0 seconds
  #
  # [nil, nil, 3, 0, 0, 0]
  # produces: 3 days, 0 hours, 0 minutes, and 0 seconds
  #
  # {minutes: 12, seconds: 0}
  # produces: 12 minutes, and 0 seconds
  def cast(value)
    return if value.blank?
    case value
    when ::String then deserialize(value)
    when ::ActiveSupport::Duration then value
    when ::Numeric
      # Greedily split the total into whole units, largest first,
      # dropping the units that end up as zero
      parts = CAST_PARTS.map do |part|
        rest, value = value.divmod(1.send(part))
        rest == 0 ? nil : [part, rest]
      end
      parts_to_duration(parts.compact)
    when ::Array
      # FIX: use the non-destructive +compact+ so the caller's array is
      # not mutated as a side effect of casting
      value = value.compact
      parts = CAST_PARTS.drop(CAST_PARTS.size - value.size).zip(value).to_h
      parts_to_duration(parts)
    when ::Hash
      parts_to_duration(value)
    else
      value
    end
  end

  # Uses the ActiveSupport::Duration::ISO8601Parser
  # See ActiveSupport::Duration#parse
  # The value must be Integer when no precision is given
  def deserialize(value)
    return if value.blank?
    ActiveSupport::Duration.parse(value)
  end

  # Uses the ActiveSupport::Duration::ISO8601Serializer
  # See ActiveSupport::Duration#iso8601
  def serialize(value)
    return if value.blank?
    value = cast(value) unless value.is_a?(ActiveSupport::Duration)
    value = remove_weeks(value) if value.parts.to_h.key?(:weeks)
    value.iso8601(precision: @scale)
  end

  # Always use the numeric value for schema dumper
  def type_cast_for_schema(value)
    cast(value).value.inspect
  end

  # Check if the user input has the correct format
  def assert_valid_value(value)
    # TODO: Implement!
  end

  # Transform a list of parts (pairs or hash) into a duration object
  def parts_to_duration(parts)
    parts = parts.to_h.slice(*CAST_PARTS)
    return 0.seconds if parts.blank?

    seconds = 0
    parts = parts.map do |part, num|
      num = num.to_i unless num.is_a?(Numeric)
      next if num <= 0

      seconds += num.send(part).value
      [part.to_sym, num]
    end
    # Hand Duration a Hash of parts (its documented shape) instead of an
    # array of pairs
    ActiveSupport::Duration.new(seconds, parts.compact.to_h)
  end

  # As PostgreSQL converts weeks in duration to days, intercept duration
  # values with weeks and turn them into days before serializing so it
  # won't break because the following issues
  # https://github.com/crashtech/torque-postgresql/issues/26
  # https://github.com/rails/rails/issues/34655
  def remove_weeks(value)
    # +to_h.dup+ avoids mutating the duration's own parts. FIX: default
    # :days to zero — durations like +2.weeks+ carry no :days part at
    # all, which used to crash with `nil + Integer`.
    parts = value.parts.to_h.dup
    parts[:days] = parts.fetch(:days, 0) + parts.delete(:weeks) * 7
    ActiveSupport::Duration.new(value.seconds.to_i, parts)
  end
end
end
end
end
end
================================================
FILE: lib/torque/postgresql/adapter/oid/line.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
class Line < Struct.new(:slope, :intercept)
  # Friendlier name for the +intercept+ member (the C coefficient).
  alias c intercept
  alias c= intercept=

  # Setting A keeps a vertical line vertical; otherwise the slope is
  # rebuilt as A/B.
  def a=(value)
    if vertical?
      self.slope = Float::INFINITY
    else
      self.slope = Rational(value, b)
    end
  end

  def a
    slope.numerator
  end

  # A zero B coefficient turns the line vertical (infinite slope).
  def b=(value)
    if value.zero?
      self.slope = Float::INFINITY
    else
      self.slope = Rational(a, value)
    end
  end

  def b
    vertical? ? 0 : slope.denominator
  end

  def horizontal?
    slope.zero?
  end

  # Vertical when the slope reports itself as (+/-) infinite.
  def vertical?
    !slope.try(:infinite?).nil?
  end
end
# Default the configurable line class to a fresh ActiveRecord::Line
# subclass, unless the application already provided one.
config.geometry.line_class ||= ::ActiveRecord.const_set('Line', Class.new(Line))
module Adapter
module OID
class Line < Torque::PostgreSQL::GeometryBuilder
  # Named pieces of a line equation: Ax + By + C = 0.
  PIECES = %i[a b c].freeze
  # PostgreSQL line literal layout: {A,B,C}
  FORMATION = '{%s,%s,%s}'.freeze

  protected

    # Converts the A/B/C coefficients into a slope/intercept pair for
    # the configured line class; a zero B means a vertical line.
    def build_klass(*args)
      return nil if args.empty?
      check_invalid_format!(args)

      coef_a, coef_b, coef_c = args.try(:first, pieces.size)&.map(&:to_f)
      slope =
        if coef_b.zero?
          Float::INFINITY
        else
          Rational(coef_a, coef_b)
        end
      config_class.new(slope, coef_c)
    end
end
end
end
end
end
================================================
FILE: lib/torque/postgresql/adapter/oid/range.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Adapter
module OID
class Range < ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Range
  # Hash keys accepted when casting a Hash into a range; per endpoint the
  # first present key wins (from/start for the lower, end/to for the upper).
  HASH_PICK = %i[from start end to].freeze

  # Prepended into Float (below) so that (+/-)Infinity endpoints can be
  # compared against Date/Time values by converting them to epoch seconds.
  module Comparison
    def <=>(other)
      return super unless other.acts_like?(:date) || other.acts_like?(:time)
      other = other.to_time if other.acts_like?(:date)
      super other.to_i
    end
  end

  # Extends the default casting to also accept arrays and hashes.
  def cast_value(value)
    case value
    when ::Array
      cast_custom(value[0], value[1])
    when ::Hash
      pieces = value.with_indifferent_access.values_at(*HASH_PICK)
      cast_custom(pieces[0] || pieces[1], pieces[2] || pieces[3])
    else
      super
    end
  end

  def map(value) # :nodoc:
    return value unless value.respond_to?(:first)
    from = yield(value.first)
    to = yield(value.last)
    cast_custom(from, to)
  end

  private

    # Builds a Range, replacing blank endpoints with -/+ Infinity.
    def cast_custom(from, to)
      from = custom_cast_single(from, true)
      to = custom_cast_single(to)
      ::Range.new(from, to)
    end

    def custom_cast_single(value, negative = false)
      value.blank? ? custom_infinity(negative) : subtype.deserialize(value)
    end

    def custom_infinity(negative)
      negative ? -::Float::INFINITY : ::Float::INFINITY
    end
end
# Swap AR's OID::Range implementation for the one above, and teach Float
# to compare against date/time range endpoints.
::ActiveRecord::ConnectionAdapters::PostgreSQL::OID.send(:remove_const, :Range)
::ActiveRecord::ConnectionAdapters::PostgreSQL::OID.const_set(:Range, Range)
::Float.prepend(Range::Comparison)
end
end
end
end
================================================
FILE: lib/torque/postgresql/adapter/oid/segment.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
class Segment < Struct.new(:point0, :point1)
  # Coordinate readers/writers for the two endpoints. Writers rebuild the
  # whole point object via the configured point class.

  def x1
    point0.x
  end

  def x1=(value)
    self.point0 = build_point(value, point0.y)
  end

  def y1
    point0.y
  end

  def y1=(value)
    self.point0 = build_point(point0.x, value)
  end

  def x2
    point1.x
  end

  def x2=(value)
    self.point1 = build_point(value, point1.y)
  end

  def y2
    point1.y
  end

  def y2=(value)
    self.point1 = build_point(point1.x, value)
  end

  private

    # The point class is configurable, so resolve it at call time.
    def build_point(x, y)
      Torque::PostgreSQL.config.geometry.point_class.new(x, y)
    end
end
# Default the configurable segment class to a fresh ActiveRecord::Segment
# subclass, unless the application already provided one.
config.geometry.segment_class ||= ::ActiveRecord.const_set('Segment', Class.new(Segment))
module Adapter
module OID
class Segment < Torque::PostgreSQL::GeometryBuilder
  # Named pieces of a segment: the two endpoints' coordinates.
  PIECES = %i[x1 y1 x2 y2].freeze
  # PostgreSQL segment literal layout: ((x1,y1),(x2,y2))
  FORMATION = '((%s,%s),(%s,%s))'.freeze

  protected

    # The point class is configurable, so resolve it at call time.
    def point_class
      Torque::PostgreSQL.config.geometry.point_class
    end

    # Turns the four coordinates into two point objects wrapped by the
    # configured segment class.
    def build_klass(*args)
      return nil if args.empty?
      check_invalid_format!(args)

      ax, ay, bx, by = args.try(:first, pieces.size)&.map(&:to_f)
      first_point = point_class.new(ax, ay)
      second_point = point_class.new(bx, by)
      config_class.new(first_point, second_point)
    end
end
end
end
end
end
================================================
FILE: lib/torque/postgresql/adapter/oid.rb
================================================
require_relative 'oid/array'
require_relative 'oid/range'
================================================
FILE: lib/torque/postgresql/adapter/quoting.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Adapter
module Quoting
  QUOTED_TYPE_NAMES = Concurrent::Map.new

  Name = ActiveRecord::ConnectionAdapters::PostgreSQL::Name
  Column = ActiveRecord::ConnectionAdapters::PostgreSQL::Column
  ColumnDefinition = ActiveRecord::ConnectionAdapters::ColumnDefinition
  Utils = ActiveRecord::ConnectionAdapters::PostgreSQL::Utils

  # Quotes type names for use in SQL queries.
  #
  # FIX: the memoization key must include the type +name+, not just the
  # schema args — keying on +args+ alone made two different type names
  # with the same schema list collide and return the first cached quoted
  # name. The key is also built before +args+ is mutated below, so the
  # cached entry stays stable.
  def quote_type_name(name, *args)
    name = name.to_s
    QUOTED_TYPE_NAMES[[name, *args]] ||= begin
      args << 'public' if args.empty? && !name.include?('.')
      quote_identifier_name(name, *args)
    end
  end

  # Make sure to support all sorts of different compositions of names:
  # plain strings, schema-qualified strings, and Name instances, with an
  # optional explicit schema override.
  def quote_identifier_name(name, schema = nil)
    name = Utils.extract_schema_qualified_name(name.to_s) unless name.is_a?(Name)
    name.instance_variable_set(:@schema, Utils.unquote_identifier(schema.to_s)) if schema
    name.quoted.freeze
  end

  # Serializes Array/Set defaults through the column's array cast type so
  # the generated SQL default is a proper PG array literal.
  def quote_default_expression(value, column)
    return super unless value.is_a?(Array) || value.is_a?(Set)

    type =
      if column.is_a?(ColumnDefinition) && column.options.try(:[], :array)
        # This is the general way
        lookup_cast_type(column.sql_type)
      elsif column.is_a?(Column) && column.array?
        # When using +change_column_default+
        lookup_cast_type_from_column(column)
      end
    type.nil? ? super : quote(type.serialize(value.to_a))
  end
end
end
end
end
================================================
FILE: lib/torque/postgresql/adapter/schema_creation.rb
================================================
module Torque
module PostgreSQL
module Adapter
module SchemaCreation
  # Inherits are now setup via table options, but keep the implementation
  # supported by this gem
  def add_table_options!(create_sql, o)
    parents = o.inherits
    if parents.present?
      # Make sure we always have parenthesis
      create_sql << '()' unless create_sql.end_with?(')')
      quoted_parents = parents.map { |parent| quote_table_name(parent) }
      create_sql << " INHERITS ( #{quoted_parents.join(' , ')} )"
    end
    super(create_sql, o)
  end
end
ActiveRecord::ConnectionAdapters::PostgreSQL::SchemaCreation.prepend SchemaCreation
end
end
end
================================================
FILE: lib/torque/postgresql/adapter/schema_definitions.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Adapter
module ColumnMethods
  # Adds a search language column to the table. See +add_search_language+
  def search_language(*names, **options)
    raise ArgumentError, "Missing column name(s) for search_language" if names.empty?
    names.each do |column_name|
      column(column_name, :regconfig, **options)
    end
  end

  # Add a search vector column to the table. See +add_search_vector+
  def search_vector(*names, columns:, **options)
    raise ArgumentError, "Missing column name(s) for search_vector" if names.empty?
    vector_options = Attributes::Builder.search_vector_options(columns: columns, **options)
    names.each do |column_name|
      column(column_name, :virtual, **vector_options)
    end
  end
end
module TableDefinition
  include ColumnMethods

  # Parent table names this table INHERITS from (nil when not provided).
  attr_reader :inherits

  def initialize(*args, **options)
    # NOTE: +super+ runs before :inherits is removed, so the parent still
    # receives the option; it is whitelisted via
    # +valid_table_definition_options+ elsewhere in this gem.
    super
    @inherits = Array.wrap(options.delete(:inherits)).flatten.compact \
      if options.key?(:inherits)
  end

  # Skip the implicit primary key for inherited tables (it comes from the
  # parent) unless one was explicitly requested.
  def set_primary_key(tn, id, primary_key, *, **)
    super unless @inherits.present? && primary_key.blank? && id == :primary_key
  end

  private

    # Translate the virtual :enum_set type into an array of :enum.
    def create_column_definition(name, type, options)
      if type == :enum_set
        type = :enum
        options ||= {}
        options[:array] = true
      end
      super(name, type, options)
    end
end
# Add exclusive support for versioned commands when importing from schema
# dump. This ensures that such methods are not available in regular
# migrations.
module Definition
  # Replays the versioned SQL for a function when the :function command
  # type is enabled; otherwise defers to the regular behavior.
  def create_function(name, version:, dir: pool.migrations_paths)
    replay_versioned_command(:function, name, version, dir) { super }
  end

  # Same as above, for user-defined types.
  def create_type(name, version:, dir: pool.migrations_paths)
    replay_versioned_command(:type, name, version, dir) { super }
  end

  # Same as above, for views.
  def create_view(name, version:, dir: pool.migrations_paths)
    replay_versioned_command(:view, name, version, dir) { super }
  end

  private

    # Executes the stored SQL for the given command type, or yields (to
    # call the original implementation) when the type is not enabled.
    def replay_versioned_command(type, name, version, dir)
      return yield unless VersionedCommands.valid_type?(type)
      execute VersionedCommands.fetch_command(dir, type, name, version)
    end
end
# Expose the column helpers on both change_table and create_table objects.
ActiveRecord::ConnectionAdapters::PostgreSQL::Table.include ColumnMethods
ActiveRecord::ConnectionAdapters::PostgreSQL::TableDefinition.include TableDefinition
end
end
end
================================================
FILE: lib/torque/postgresql/adapter/schema_dumper.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Adapter
module SchemaDumper
  # Decomposes a +to_tsvector(...)+ expression into (language, column,
  # optional weight 'A'..'D') triplets so search vectors can be dumped
  # back using the dev-friendly +language:+/+columns:+ options.
  SEARCH_VECTOR_SCANNER = /
    to_tsvector\(
    ('[^']+'|[a-z][a-z0-9_]*)[^,]*,[^\(]*
    \(?coalesce\(([a-z][a-z0-9_]*)[^\)]*\)\)?
    (?:::[^\)]*\))?
    (?:\s*,\s*'([A-D])')?
  /ix

  def initialize(*)
    super
    if with_versioned_commands?
      @versioned_commands = VersionedCommands::SchemaTable.new(@connection.pool)
      # The bookkeeping table itself must never be dumped
      @ignore_tables << @versioned_commands.table_name
    end
  end

  def dump(stream) # :nodoc:
    # Toggle dump mode on for the duration of the dump, then back off
    @connection.dump_mode!
    super
    @connection.dump_mode!
    stream
  end

  private

    def types(stream) # :nodoc:
      super
      versioned_commands(stream, :type)
      versioned_commands(stream, :function)
    end

    def tables(stream) # :nodoc:
      around_tables(stream) { dump_tables(stream) }
    end

    # Interleave fx functions/triggers and versioned views around the
    # table definitions.
    def around_tables(stream)
      functions(stream) if fx_functions_position == :beginning
      yield
      versioned_commands(stream, :view, true)
      functions(stream) if fx_functions_position == :end
      triggers(stream) if defined?(::Fx::SchemaDumper::Trigger)
    end

    # Dump regular tables first and inherited tables afterwards, so every
    # parent table exists before its children; foreign keys go last.
    def dump_tables(stream)
      inherited_tables = @connection.inherited_tables
      sorted_tables = (@connection.tables - @connection.views).filter_map do |table_name|
        name_parts = table_name.split(/(?:public)?\./).reverse.compact_blank
        next if ignored?(table_name) || ignored?(name_parts.join('.'))
        [table_name, name_parts]
      end.sort_by(&:last).to_h

      postponed = []
      stream.puts " # These are the common tables"
      sorted_tables.each do |table, (table_name, _)|
        next postponed << table if inherited_tables.key?(table_name)
        table(table, stream)
        stream.puts # Ideally we would not do this in the last one
      end

      if postponed.present?
        stream.puts " # These are tables that have inheritance"
        postponed.each do |table|
          sub_stream = StringIO.new
          table(table, sub_stream)
          # Drop the empty block when the child adds no extra columns
          stream.puts sub_stream.string.sub(/do \|t\|\n end/, '')
          stream.puts
        end
      end

      # Fixes double new lines to single new lines
      stream.pos -= 1

      # dump foreign keys at the end to make sure all dependent tables exist.
      if @connection.supports_foreign_keys?
        foreign_keys_stream = StringIO.new
        sorted_tables.each do |(tbl, *)|
          foreign_keys(tbl, foreign_keys_stream)
        end

        foreign_keys_string = foreign_keys_stream.string
        stream.puts if foreign_keys_string.length > 0
        stream.print foreign_keys_string
      end
    end

    # Make sure to remove the schema from the table name
    def remove_prefix_and_suffix(table)
      super(table.sub(/\A[a-z0-9_]*\./, ''))
    end

    # Dump user defined schemas
    def schemas(stream)
      return super if !PostgreSQL.config.schemas.enabled
      return if (list = (@connection.user_defined_schemas - ['public'])).empty?

      stream.puts " # Custom schemas defined in this database."
      list.each { |name| stream.puts " create_schema \"#{name}\", force: :cascade" }
      stream.puts
    end

    # Adjust the schema type for search vector
    def schema_type_with_virtual(column)
      column.virtual? && column.type == :tsvector ? :search_vector : super
    end

    # Adjust the schema type for search language
    def schema_type(column)
      column.sql_type == 'regconfig' ? :search_language : super
    end

    # Adjust table options to make the dump more readable
    def prepare_column_options(column)
      options = super
      parse_search_vector_options(column, options) if column.type == :tsvector
      options
    end

    # Parse the search vector operation into a readable format; bails out
    # (keeping the raw :as expression) when more than one language is used.
    def parse_search_vector_options(column, options)
      settings = options[:as]&.scan(SEARCH_VECTOR_SCANNER)
      return if settings.blank?

      languages = settings.map(&:shift).uniq
      return if languages.many?

      # A quoted language is embedded text; otherwise it references a column
      language = languages.first
      language = language[0] == "'" ? language[1..-2] : language.to_sym

      columns = parse_search_vector_columns(settings)
      options.except!(:as, :type)
      options.merge!(language: language.inspect, columns: columns)
    end

    # Simplify the whole columns configuration to make it more manageable
    def parse_search_vector_columns(settings)
      return ":#{settings.first.first}" if settings.one?

      settings = settings.sort_by(&:last)
      weights = %w[A B C D]
      # When weights follow the natural A,B,C,D order a plain list of
      # column names is enough; otherwise keep the explicit mapping
      # (+break+ yields nil, falling through to the hash form below)
      columns = settings.each.with_index.reduce([]) do |acc, (setting, index)|
        column, weight = setting
        break if (weights[index] || 'D') != weight
        acc << column
        acc
      end
      return columns.map(&:to_sym).inspect if columns
      settings.to_h.transform_values(&:inspect)
    end

    # Simply add all versioned commands to the stream
    def versioned_commands(stream, type, add_newline = false)
      return unless with_versioned_commands?

      list = @versioned_commands.versions_of(type.to_s)
      return if list.empty?

      existing = list_existing_versioned_commands(type)
      stream.puts if add_newline
      stream.puts " # These are #{type.to_s.pluralize} managed by versioned commands"
      list.each do |(name, version)|
        next if existing.exclude?(name)
        stream.puts " create_#{type} \"#{name}\", version: #{version}"
      end
      stream.puts unless add_newline
    end

    # Set of existing objects of +type+, keyed by bare name for the public
    # schema and by 'schema_name' otherwise.
    def list_existing_versioned_commands(type)
      @connection.list_versioned_commands(type).each_with_object(Set.new) do |entry, set|
        set << (entry.first == 'public' ? entry.last : entry.join('_'))
      end
    end

    def with_versioned_commands?
      PostgreSQL.config.versioned_commands.enabled
    end

    # Where the fx gem wants its functions dumped (nil when fx is absent).
    def fx_functions_position
      return unless defined?(::Fx::SchemaDumper::Function)
      Fx.configuration.dump_functions_at_beginning_of_schema ? :beginning : :end
    end
end
ActiveRecord::ConnectionAdapters::PostgreSQL::SchemaDumper.prepend SchemaDumper
end
end
end
================================================
FILE: lib/torque/postgresql/adapter/schema_overrides.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Adapter
module SchemaOverrides
  # This adds better support for handling the quotation of table names
  def quote_table_name(name)
    ActiveRecord::ConnectionAdapters::PostgreSQL::Quoting::QUOTED_TABLE_NAMES.then do |m|
      m[name] ||= quote_identifier_name(name)
    end
  end

  # Wrap every schema-touching method so a :schema option (when present)
  # is folded into the table/type name before delegating to the original.
  %i[
    table_exists? indexes index_exists? columns column_exists? primary_key
    create_table change_table add_column add_columns remove_columns remove_column
    change_column change_column_default change_column_null rename_column
    add_index remove_index rename_index index_name_exists? foreign_keys
    add_timestamps remove_timestamps change_table_comment change_column_comment
    bulk_change_table
    rename_table add_foreign_key remove_foreign_key foreign_key_exists?
  ].each do |method_name|
    define_method(method_name) do |table_name, *args, **options, &block|
      table_name = sanitize_name_with_schema(table_name, options)
      super(table_name, *args, **options, &block)
    end
  end

  # Several tables can be dropped at once; +options+ is duped per table
  # because +sanitize_name_with_schema+ deletes :schema from the hash it
  # receives, and the :schema must apply to every name.
  def drop_table(*table_names, **options)
    table_names = table_names.map { |name| sanitize_name_with_schema(name, options.dup) }
    super(*table_names, **options)
  end

  private

    # Normalize to a String (the name may be a Name object) before the
    # length validation runs.
    def validate_table_length!(table_name)
      super(table_name.to_s)
    end
end
include SchemaOverrides
end
end
end
================================================
FILE: lib/torque/postgresql/adapter/schema_statements.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Adapter
module SchemaStatements
  # Drops a type. Options:
  #   :force  - appended verbatim after upcasing (e.g. 'cascade' => CASCADE)
  #   :check  - when true (the default), adds IF EXISTS
  #   :schema - folded into the name by +sanitize_name_with_schema+
  def drop_type(name, options = {})
    force = options.fetch(:force, '').upcase
    check = 'IF EXISTS' if options.fetch(:check, true)
    name = sanitize_name_with_schema(name, options)
    internal_exec_query(<<-SQL.squish).tap { reload_type_map }
      DROP TYPE #{check}
      #{quote_type_name(name)} #{force}
    SQL
  end

  # Renames a type
  def rename_type(type_name, new_name, options = {})
    type_name = sanitize_name_with_schema(type_name, options)
    internal_exec_query(<<-SQL.squish).tap { reload_type_map }
      ALTER TYPE #{quote_type_name(type_name)}
      RENAME TO #{Quoting::Name.new(nil, new_name.to_s).quoted}
    SQL
  end

  # Creates a column that stores the underlying language of the record so
  # that a search vector can be created dynamically based on it. It uses
  # a `regconfig` type, so string conversions are mandatory
  def add_search_language(table, name, options = {})
    add_column(table, name, :regconfig, options)
  end

  # Creates a column and setup a search vector as a virtual column. The
  # options are dev-friendly and controls how the vector function will be
  # defined
  #
  # === Options
  # [:columns]
  #   The list of columns that will be used to create the search vector.
  #   It can be a single column, an array of columns, or a hash as a
  #   combination of column name and weight (A, B, C, or D).
  # [:language]
  #   Specify the language config to be used for the search vector. If a
  #   string is provided, then the value will be statically embedded. If a
  #   symbol is provided, then it will reference another column.
  # [:stored]
  #   Specify if the value should be stored in the database. As of now,
  #   PostgreSQL only supports `true`, which will create a stored column.
  def add_search_vector(table, name, columns, options = {})
    options = Builder.search_vector_options(columns: columns, **options)
    add_column(table, name, options.delete(:type), options)
  end

  # Changes the enumerator by adding new values
  #
  # Example:
  #   add_enum_values 'status', ['baz']
  #   add_enum_values 'status', ['baz'], before: 'bar'
  #   add_enum_values 'status', ['baz'], after: 'foo'
  #   add_enum_values 'status', ['baz'], prepend: true
  def add_enum_values(name, values, options = {})
    name = sanitize_name_with_schema(name, options)
    before = options.fetch(:before, false)
    after = options.fetch(:after, false)

    # :prepend means "before the current first value"
    before = enum_values(name).first if options.key? :prepend
    before = quote(before) unless before == false
    after = quote(after) unless after == false

    quote_enum_values(name, values, options).each do |value|
      reference = "BEFORE #{before}" unless before == false
      reference = "AFTER #{after}" unless after == false
      execute <<-SQL.squish
        ALTER TYPE #{quote_type_name(name)}
        ADD VALUE #{value} #{reference}
      SQL

      # Chain each subsequent value after the one just added so the
      # given order is preserved
      before = false
      after = value
    end
  end

  # Returns all values that an enum type can have.
  def enum_values(name)
    select_values(<<-SQL.squish, 'SCHEMA')
      SELECT enumlabel FROM pg_enum
      WHERE enumtypid = #{quote(name)}::regtype::oid
      ORDER BY enumsortorder
    SQL
  end

  # Add the schema option when extracting table options
  def table_options(table_name)
    options = super
    if PostgreSQL.config.schemas.enabled
      table, schema = table_name.split('.').reverse
      if table.present? && schema.present? && schema != current_schema
        options[:schema] = schema
      end
    end

    # Translate the raw "INHERITS (...)" clause into the :inherits option
    if options[:options]&.start_with?('INHERITS (')
      options.delete(:options)
      tables = inherited_table_names(table_name)
      options[:inherits] = tables.one? ? tables.first : tables
    end
    options
  end

  # When dumping the schema we need to add all schemas, not only those
  # active for the current +schema_search_path+
  def quoted_scope(name = nil, type: nil)
    return super unless name.nil?

    scope = super
    # NOTE(review): +global+ is computed but never used; it looks like it
    # was meant to guard the assignment below — confirm against history.
    global = scope[:schema].start_with?('ANY (')
    scope[:schema] = "ANY ('{#{user_defined_schemas.join(',')}}')"
    scope
  end

  # Fix the query to include the schema on tables names when dumping
  def data_source_sql(name = nil, type: nil)
    return super unless name.nil?
    super.sub('SELECT c.relname FROM', "SELECT n.nspname || '.' || c.relname FROM")
  end

  # Add schema and inherits as one of the valid options for table
  # definition
  def valid_table_definition_options
    super + [:schema, :inherits]
  end

  # Add proper support for schema load when using versioned commands
  def assume_migrated_upto_version(version)
    return super unless PostgreSQL.config.versioned_commands.enabled
    return super if (commands = pool.migration_context.migration_commands).empty?

    version = version.to_i
    migration_context = pool.migration_context
    migrated = migration_context.get_all_versions
    versions = migration_context.migrations.map(&:version)

    inserting = (versions - migrated).select { |v| v < version }
    inserting << version unless migrated.include?(version)
    return if inserting.empty?

    duplicated = inserting.tally.filter_map { |v, count| v if count > 1 }
    raise <<~MSG.squish if duplicated.present?
      Duplicate migration #{duplicated.first}.
      Please renumber your migrations to resolve the conflict.
    MSG

    # Ensure the commands table exists before recording the versions
    VersionedCommands::SchemaTable.new(pool).create_table
    execute insert_versions_sql(inserting)
  end

  # Add proper support for schema load when using versioned commands
  def insert_versions_sql(versions)
    return super unless PostgreSQL.config.versioned_commands.enabled

    commands = pool.migration_context.migration_commands.select do |migration|
      versions.include?(migration.version)
    end
    return super if commands.empty?

    # Regular migrations go through +super+; command migrations get a
    # dedicated INSERT into the versioned-commands table
    table = quote_table_name(VersionedCommands::SchemaTable.new(pool).table_name)
    sql = super(versions - commands.map(&:version))
    sql << "\nINSERT INTO #{table} (version, type, object_name) VALUES\n"
    sql << commands.map do |m|
      +"(#{quote(m.version)}, #{quote(m.type)}, #{quote(m.object_name)})"
    end.join(",\n")
    sql << ";"
    sql
  end

  private

    # Remove the schema from the sequence name
    def sequence_name_from_parts(table_name, column_name, suffix)
      super(table_name.split('.').last, column_name, suffix)
    end

    # Helper for supporting schema name in several methods; deletes
    # :schema from +options+ as a side effect.
    def sanitize_name_with_schema(name, options)
      return name if (schema = options&.delete(:schema)).blank?
      Quoting::Name.new(schema.to_s, name.to_s)
    end

    # Apply the optional :prefix / :suffix (the value +true+ uses the
    # type name itself) and quote each resulting label.
    def quote_enum_values(name, values, options)
      prefix = options[:prefix]
      prefix = name if prefix === true
      suffix = options[:suffix]
      suffix = name if suffix === true
      values.map! do |value|
        quote([prefix, value, suffix].compact.join('_'))
      end
    end
end
end
end
end
================================================
FILE: lib/torque/postgresql/adapter.rb
================================================
# frozen_string_literal: true
require_relative 'adapter/database_statements'
require_relative 'adapter/oid'
require_relative 'adapter/quoting'
require_relative 'adapter/schema_creation'
require_relative 'adapter/schema_definitions'
require_relative 'adapter/schema_dumper'
require_relative 'adapter/schema_statements'
module Torque
module PostgreSQL
module Adapter
  include Quoting
  include DatabaseStatements
  include SchemaStatements

  # :nodoc:
  # Array whose unary minus deduplicates (interns) each element instead
  # of negating, matching AR's use of +-value+ for string deduplication.
  class DeduplicatableArray < ::Array
    def deduplicate
      map { |value| -value }
    end
    alias :-@ :deduplicate
  end

  # Get the current PostgreSQL version as a Gem Version.
  def version
    @version ||= Gem::Version.new(
      select_value('SELECT version()').match(/#{Adapter::ADAPTER_NAME} ([\d\.]+)/)[1]
    )
  end

  # Add `inherits` and `schema` to the list of extracted table options
  def extract_table_options!(options)
    super.merge(options.extract!(:inherits, :schema))
  end

  # Allow filtered bulk insert by adding the where clause. This method is
  # only used by +InsertAll+, so it somewhat safe to override it
  def build_insert_sql(insert)
    super.tap do |sql|
      if insert.update_duplicates? && insert.where_condition?
        if insert.returning
          # Inject the WHERE clause right before RETURNING
          sql.sub!(' RETURNING ', " WHERE #{insert.where} RETURNING ")
        else
          sql << " WHERE #{insert.where}"
        end
      end
    end
  end
end
ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.prepend Adapter
end
end
================================================
FILE: lib/torque/postgresql/arel/infix_operation.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Arel
  # Mixin that receives one builder method per configured operator (see
  # +build_operations+ below); included into Arel nodes and attributes.
  Math = Module.new

  # For each (name, operator) pair: defines an InfixOperation subclass
  # under ::Arel::Nodes, aliases the generic infix visitor for it, and
  # adds a builder method to Math. Names already defined under
  # ::Arel::Nodes are skipped.
  def self.build_operations(operations)
    default_alias = :visit_Arel_Nodes_InfixOperation
    operations&.each do |name, operator|
      klass_name = name.to_s.camelize
      next if ::Arel::Nodes.const_defined?(klass_name)

      klass = Class.new(::Arel::Nodes::InfixOperation)
      # Deduplicate the operator string and keep it as a symbol
      operator = (-operator).to_sym
      klass.send(:define_method, :initialize) { |*args| super(operator, *args) }
      ::Arel::Nodes.const_set(klass_name, klass)

      visitor = :"visit_Arel_Nodes_#{klass_name}"
      ::Arel::Visitors::PostgreSQL.send(:alias_method, visitor, default_alias)

      # Don't worry about quoting here, if the right side is something that
      # doesn't need quoting, it will leave it as it is
      Math.send(:define_method, klass_name.underscore) { |other| klass.new(self, other) }
    end
  end

  ::Arel::Nodes::Node.include(Math)
  ::Arel::Attribute.include(Math)
end
end
end
================================================
FILE: lib/torque/postgresql/arel/join_source.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Arel
module JoinSource
  # Flag toggled by +SelectManager#only+ to request the ONLY modifier.
  attr_accessor :only

  # True solely when the flag was explicitly set to +true+.
  def only?
    only.equal?(true)
  end
end
# Expose the ONLY flag on Arel's join source node.
::Arel::Nodes::JoinSource.include JoinSource
end
end
end
================================================
FILE: lib/torque/postgresql/arel/nodes.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Arel
module Nodes
  # Cast node rendered as +left::right+ by the visitor; +array+ appends
  # '[]' to the target type.
  class Cast < ::Arel::Nodes::Binary
    include ::Arel::Expressions
    include ::Arel::Predications
    include ::Arel::AliasPredication
    include ::Arel::OrderPredications
    include ::Arel::Math

    def initialize(left, right, array = false)
      # Unfreeze the type name so the array suffix can be appended
      right = +right.to_s
      right << '[]' if array
      super left, right
    end
  end

  # A quoted table-name reference; when a +reference+ object is given it
  # takes over aliasing via +as+.
  class Ref < ::Arel::Nodes::Unary
    attr_reader :reference
    alias to_s expr

    def initialize(expr, reference = nil)
      @reference = reference
      super expr
    end

    def as(other)
      @reference&.as(other) || super
    end
  end
end

# Builds a quoted ARRAY[...] node. Accepts either a list of values or a
# single enumerable, optionally casting the result (cast: 'type').
::Arel.define_singleton_method(:array) do |*values, cast: nil|
  values = values.first if values.size.eql?(1) && values.first.is_a?(::Enumerable)
  result = ::Arel::Nodes.build_quoted(values)
  result = result.pg_cast(cast, true) if cast.present?
  result
end
::Arel::Nodes::Function.include(::Arel::Math)
end
end
end
================================================
FILE: lib/torque/postgresql/arel/operations.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Arel
module Operations
  # Create a cast operation
  def pg_cast(type, array = false)
    Nodes::Cast.new(self, type, array)
  end

  # Make sure to add proper support over AR's own +cast+ method while
  # still allow attributes to be casted; array casts always go through
  # +pg_cast+ since AR's +cast+ does not support them.
  def cast(type, array = false)
    defined?(super) && !array ? super(type) : pg_cast(type, array)
  end
end

::Arel::Attributes::Attribute.include(Operations)
::Arel::Nodes::SqlLiteral.include(Operations)
::Arel::Nodes::Node.include(Operations)
end
end
end
================================================
FILE: lib/torque/postgresql/arel/select_manager.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Arel
module SelectManager
  # Marks the current context's join source with the ONLY flag, so the
  # generated FROM clause skips descendant (inherited) tables.
  def only
    source = @ctx.source
    source.only = true
  end
end
# Expose +only+ on every Arel select manager.
::Arel::SelectManager.include SelectManager
end
end
end
================================================
FILE: lib/torque/postgresql/arel/visitors.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Arel
module Visitors
  # Add ONLY modifier to query
  def visit_Arel_Nodes_JoinSource(o, collector)
    collector << 'ONLY ' if o.only?
    super
  end

  # Allow quoted arrays to get here
  def visit_Arel_Nodes_Quoted(o, collector)
    return super unless o.expr.is_a?(::Enumerable)
    quote_array(o.expr, collector)
  end

  # Allow quoted arrays to get here
  def visit_Arel_Nodes_Casted(o, collector)
    value = o.value_for_database
    klass = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Array::Data
    return super unless value.is_a?(klass)
    quote_array(value.values, collector)
  end

  ## TORQUE VISITORS

  # Render a Ref node as a quoted table name
  def visit_Torque_PostgreSQL_Arel_Nodes_Ref(o, collector)
    collector << quote_table_name(o.expr)
  end

  # Allow casting any node (rendered as left::right)
  def visit_Torque_PostgreSQL_Arel_Nodes_Cast(o, collector)
    visit(o.left, collector) << '::' << o.right
  end

  private

    # Emit ARRAY[...] quoting each element individually
    def quote_array(value, collector)
      value = value.map(&::Arel::Nodes.method(:build_quoted))
      collector << 'ARRAY['
      visit_Array(value, collector)
      collector << ']'
    end
end
::Arel::Visitors::PostgreSQL.prepend(Visitors)
end
end
end
================================================
FILE: lib/torque/postgresql/arel.rb
================================================
require_relative 'arel/infix_operation'
require_relative 'arel/join_source'
require_relative 'arel/nodes'
require_relative 'arel/operations'
require_relative 'arel/select_manager'
require_relative 'arel/visitors'
================================================
FILE: lib/torque/postgresql/associations/association_scope.rb
================================================
# frozen_string_literal: true
module Torque
  module PostgreSQL
    module Associations
      # Teaches ActiveRecord's association scope builder how to join and
      # filter associations connected through a PostgreSQL array column.
      module AssociationScope
        # A customized predicate builder for array attributes that can be used
        # standalone and changes the behavior of the blank state
        class PredicateBuilderArray
          include PredicateBuilder::ArrayHandler

          # Blank values produce an always-false condition rather than
          # matching records whose array column is empty.
          def call_with_empty(attribute)
            '1=0' # Does not match records with empty arrays
          end
        end

        module ClassMethods
          # Flatten the bind values, since array-backed keys can nest them.
          def get_bind_values(*)
            super.flatten
          end
        end

        private

          # When loading a join by value (last as in we know which records to
          # load) only has many array need to have a different behavior, so it
          # can properly match array values
          def last_chain_scope(scope, reflection, owner)
            return super unless reflection.connected_through_array?
            return super if reflection.macro == :belongs_to_many

            constraint = PredicateBuilderArray.new.call_for_array(
              reflection.array_attribute,
              transform_value(owner[reflection.join_foreign_key]),
            )
            scope.where!(constraint)
          end

          # When loading a join by reference (next as in we don't know which
          # records to load), it can take advantage of the new predicate builder
          # to figure out the most optimal way to connect both properties.
          # NOTE(review): +join+ and +foreign_table+ come from the base
          # AssociationScope implementation — verify they stay available.
          def next_chain_scope(scope, reflection, next_reflection)
            return super unless reflection.connected_through_array?

            primary_key = reflection.aliased_table[reflection.join_primary_key]
            foreign_key = next_reflection.aliased_table[reflection.join_foreign_key]
            constraint = PredicateBuilder::ArelAttributeHandler.call(primary_key, foreign_key)
            scope.joins!(join(foreign_table, constraint))
          end

          # For array-like values, it needs to call the method as many times as
          # the array size
          def transform_value(value)
            if value.is_a?(::Enumerable)
              value.map { |v| value_transformation.call(v) }
            else
              value_transformation.call(value)
            end
          end
      end

      ::ActiveRecord::Associations::AssociationScope.singleton_class.prepend(AssociationScope::ClassMethods)
      ::ActiveRecord::Associations::AssociationScope.prepend(AssociationScope)
    end
  end
end
================================================
FILE: lib/torque/postgresql/associations/belongs_to_many_association.rb
================================================
# frozen_string_literal: true
require 'active_record/associations/collection_association'
# FIXME: build, create
module Torque
  module PostgreSQL
    module Associations
      # Collection association where the owner record holds an array column
      # (+source_attr+, the reflection's foreign key) containing the primary
      # keys of the associated records.
      class BelongsToManyAssociation < ::ActiveRecord::Associations::CollectionAssociation
        include ::ActiveRecord::Associations::ForeignAssociation

        ## CUSTOM

        # List the associated ids, preferring in-memory information (loaded
        # target, or unsaved records forcing a load) over the raw column
        # value (+stale_state+) or the column default.
        def ids_reader
          if loaded?
            target.pluck(reflection.active_record_primary_key)
          elsif !target.empty?
            load_target.pluck(reflection.active_record_primary_key)
          else
            stale_state || column_default_value
          end
        end

        # Write the ids into the owner's array column; when the owner is
        # already persisted and the column changed, persist right away.
        def ids_writer(ids)
          ids = ids.presence || column_default_value
          owner.write_attribute(source_attr, ids)
          return unless owner.persisted? && owner.attribute_changed?(source_attr)
          owner.update_attribute(source_attr, ids)
        end

        # Size combines unsaved in-memory records with the ids stored on
        # the owner's column, avoiding a query whenever possible.
        def size
          if loaded?
            target.size
          elsif !target.empty?
            unsaved_records = target.select(&:new_record?)
            unsaved_records.size + stale_state.size
          else
            stale_state&.size || 0
          end
        end

        def empty?
          size.zero?
        end

        # A record is included when it is in memory or when its primary key
        # is present in the owner's id list.
        def include?(record)
          return false unless record.is_a?(reflection.klass)
          return include_in_memory?(record) if record.new_record?

          (!target.empty? && target.include?(record)) ||
            stale_state&.include?(record.read_attribute(klass_attr))
        end

        # Load persisted records from the database and merge them with any
        # records already held in memory.
        def load_target
          if stale_target? || find_target?
            persisted_records = (find_target || []) + target.extract!(&:persisted?)
            @target = merge_target_lists(persisted_records, target)
          end

          loaded!
          target
        end

        # Wrap a mutation so nested calls don't rewrite the ids repeatedly;
        # once the outermost block finishes, sync the owner's column either
        # from the target (+from_target+) or from the raw column state.
        def build_changes(from_target = false)
          return yield if defined?(@_building_changes) && @_building_changes

          @_building_changes = true
          yield.tap { ids_writer(from_target ? ids_reader : stale_state) }
        ensure
          @_building_changes = nil
        end

        # Fire the :"#{prefix}_remove"/:"#{prefix}_add" callbacks for every
        # record whose id left or entered the owner's id list.
        def trigger(prefix, before_ids, after_ids)
          removed_ids = before_ids - after_ids
          added_ids = after_ids - before_ids

          if removed_ids.any?
            callbacks_for(method = :"#{prefix}_remove").each do |callback|
              target_scope.find(removed_ids).each do |record|
                callback.call(method, owner, record)
              end
            end
          end

          if added_ids.any?
            callbacks_for(method = :"#{prefix}_add").each do |callback|
              target_scope.find(added_ids).each do |record|
                callback.call(method, owner, record)
              end
            end
          end
        end

        ## HAS MANY

        # Apply the :dependent option when the owner is destroyed, mirroring
        # the has_many behavior (restrict, destroy, destroy_async, or the
        # default delete_all).
        def handle_dependency
          case options[:dependent]
          when :restrict_with_exception
            raise ActiveRecord::DeleteRestrictionError.new(reflection.name) unless empty?
          when :restrict_with_error
            unless empty?
              record = owner.class.human_attribute_name(reflection.name).downcase
              owner.errors.add(:base, :'restrict_dependent_destroy.has_many', record: record)
              throw(:abort)
            end
          when :destroy
            # No point in executing the counter update since we're going to destroy the parent anyway
            load_target.each { |t| t.destroyed_by_association = reflection }
            destroy_all
          when :destroy_async
            load_target.each do |t|
              t.destroyed_by_association = reflection
            end

            unless target.empty?
              association_class = target.first.class
              primary_key_column = association_class.primary_key.to_sym

              ids = target.collect do |assoc|
                assoc.public_send(primary_key_column)
              end

              enqueue_destroy_association(
                owner_model_name: owner.class.to_s,
                owner_id: owner.id,
                association_class: association_class.to_s,
                association_ids: ids,
                association_primary_key_column: primary_key_column,
                ensuring_owner_was_method: options.fetch(:ensuring_owner_was, nil)
              )
            end
          else
            delete_all
          end
        end

        # After persisting the record, append its primary key to the owner's
        # id list.
        def insert_record(record, *)
          (record.persisted? || super).tap do |saved|
            ids_rewriter(record.read_attribute(klass_attr), :<<) if saved
          end
        end

        ## BELONGS TO

        # Assign the block-provided default collection when nothing is set.
        def default(&block)
          writer(owner.instance_exec(&block)) if reader.nil?
        end

        private

          ## CUSTOM

          # Create one record (or one per attributes hash) inside a
          # transaction, rolling back when the insert fails.
          def _create_record(attributes, raises = false, &block)
            if attributes.is_a?(Array)
              attributes.collect { |attr| _create_record(attr, raises, &block) }
            else
              build_record(attributes, &block).tap do |record|
                transaction do
                  result = nil
                  add_to_target(record) do
                    result = insert_record(record, true, raises) { @_was_loaded = loaded? }
                  end
                  raise ActiveRecord::Rollback unless result
                end
              end
            end
          end

          # When the idea is to nullify the association, then just set the owner
          # +primary_key+ as empty
          def delete_count(method, scope, ids)
            size_cache = scope.delete_all if method == :delete_all
            (size_cache || ids.size).tap { ids_rewriter(ids, :-) }
          end

          def delete_or_nullify_all_records(method)
            delete_count(method, scope, ids_reader)
          end

          # Deletes the records according to the :dependent option.
          def delete_records(records, method)
            ids = read_records_ids(records)

            if method == :destroy
              records.each(&:destroy!)
              ids_rewriter(ids, :-)
            else
              scope = self.scope.where(klass_attr => records)
              delete_count(method, scope, ids)
            end
          end

          # The owner-side array column holding the associated ids.
          def source_attr
            reflection.foreign_key
          end

          # The attribute on the associated class referenced by the ids.
          def klass_attr
            reflection.active_record_primary_key
          end

          # Collect the +klass_attr+ value from each record; the
          # each_with_object enumerator yields [record, klass_attr] pairs, so
          # &:read_attribute maps to record.read_attribute(klass_attr).
          def read_records_ids(records)
            return unless records.present?
            Array.wrap(records).each_with_object(klass_attr).map(&:read_attribute).presence
          end

          # Apply +operator+ (:<< or :-) to the owner's id list and persist
          # it unless a build_changes block is in charge of syncing.
          # NOTE(review): persists the raw +list+, not the normalized value
          # written to the owner one line above — confirm this is intended.
          def ids_rewriter(ids, operator)
            list = owner[source_attr] ||= []
            list = list.public_send(operator, ids)
            owner[source_attr] = list.uniq.compact.presence || column_default_value

            return if @_building_changes || !owner.persisted?
            owner.update_attribute(source_attr, list)
          end

          # Default value declared for the owner's array column.
          def column_default_value
            owner.class.columns_hash[source_attr].default
          end

          def callback(*)
            true # This is handled/trigger when the owner record actually changes
          end

          ## HAS MANY

          # All bulk mutations funnel through build_changes so the owner's
          # id column is written exactly once.
          def replace_records(*)
            build_changes(true) { super }
          end

          def concat_records(*)
            build_changes(true) { super }
          end

          def delete_or_destroy(*)
            build_changes(true) { super }
          end

          def difference(a, b)
            a - b
          end

          def intersection(a, b)
            a & b
          end

          ## BELONGS TO

          # The primary key is managed through the id list, so it must not
          # leak into attributes used to create new records.
          def scope_for_create
            super.except!(klass.primary_key)
          end

          def find_target?
            !loaded? && foreign_key_present? && klass
          end

          # The "foreign key" here is the owner's id list.
          def foreign_key_present?
            stale_state.present?
          end

          # Only invertible against an array-connected has_many counterpart.
          def invertible_for?(record)
            return unless (inverse = inverse_reflection_for(record))
            collection_class = ::ActiveRecord::Associations::HasManyAssociation
            inverse.is_a?(collection_class) && inverse.connected_through_array?
          end

          # Raw value of the owner's id list column.
          def stale_state
            owner.read_attribute(source_attr)
          end
      end

      ::ActiveRecord::Associations.const_set(:BelongsToManyAssociation, BelongsToManyAssociation)
    end
  end
end
================================================
FILE: lib/torque/postgresql/associations/builder/belongs_to_many.rb
================================================
# frozen_string_literal: true
module Torque
  module PostgreSQL
    module Associations
      module Builder
        # Association builder for the +belongs_to_many+ macro: a collection
        # association whose ids live in an array column on the owner. Mixes
        # collection semantics with belongs_to-style options (touch, default,
        # optional/required).
        class BelongsToMany < ::ActiveRecord::Associations::Builder::CollectionAssociation
          def self.macro
            :belongs_to_many
          end

          # Accept belongs_to-like options on top of the collection ones.
          def self.valid_options(options)
            super + [:touch, :optional, :default, :dependent, :primary_key, :required]
          end

          # Only the restrict variants make sense for :dependent here.
          def self.valid_dependent_options
            [:restrict_with_error, :restrict_with_exception]
          end

          def self.define_callbacks(model, reflection)
            super
            add_touch_callbacks(model, reflection) if reflection.options[:touch]
            add_default_callbacks(model, reflection) if reflection.options[:default]
            add_change_callbacks(model, reflection)
          end

          # Plain reader delegating to the association object.
          def self.define_readers(mixin, name)
            mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
              def #{name}
                association(:#{name}).reader
              end
            CODE
          end

          # Plain writer delegating to the association object.
          def self.define_writers(mixin, name)
            mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
              def #{name}=(value)
                association(:#{name}).writer(value)
              end
            CODE
          end

          # Apply the :default block before validation when nothing is set.
          def self.add_default_callbacks(model, reflection)
            model.before_validation ->(o) do
              o.association(reflection.name).default(&reflection.options[:default])
            end
          end

          # Touch the associated records on create/update/destroy/touch,
          # mirroring belongs_to's :touch behavior for a collection.
          def self.add_touch_callbacks(model, reflection)
            foreign_key = reflection.foreign_key
            n           = reflection.name
            touch       = reflection.options[:touch]

            callback = ->(changes_method) do
              ->(record) do
                BelongsToMany.touch_record(record, record.send(changes_method), foreign_key,
                  n, touch, belongs_to_touch_method)
              end
            end

            model.after_create callback.call(:saved_changes), if: :saved_changes?
            model.after_update callback.call(:saved_changes), if: :saved_changes?
            model.after_destroy callback.call(:changes_to_save)
            model.after_touch callback.call(:changes_to_save)
          end

          # Touch both the previously associated records (when the id list
          # changed) and the currently associated, persisted ones.
          def self.touch_record(o, changes, foreign_key, name, touch, touch_method) # :nodoc:
            old_foreign_ids = changes[foreign_key] && changes[foreign_key].first

            if old_foreign_ids.present?
              association = o.association(name)
              reflection = association.reflection
              klass = association.klass

              primary_key = reflection.association_primary_key(klass)
              old_records = klass.find_by(primary_key => old_foreign_ids)

              old_records&.map do |old_record|
                if touch != true
                  old_record.send(touch_method, touch)
                else
                  old_record.send(touch_method)
                end
              end
            end

            o.send(name)&.map do |record|
              if record && record.persisted?
                if touch != true
                  record.send(touch_method, touch)
                else
                  record.send(touch_method)
                end
              end
            end
          end

          # Fire the association's add/remove triggers around save whenever
          # the id column changed.
          def self.add_change_callbacks(model, reflection)
            foreign_key = reflection.foreign_key
            name = reflection.name

            model.before_save ->(record) do
              before, after = record.changes[foreign_key]
              record.association(name).trigger(:before, before, after) if before && after
            end

            model.after_save ->(record) do
              before, after = record.previous_changes[foreign_key]
              record.association(name).trigger(:after, before, after) if before && after
            end
          end

          def self.add_destroy_callbacks(model, reflection)
            model.after_destroy lambda { |o| o.association(reflection.name).handle_dependency }
          end

          # Presence validation driven by :required/:optional, falling back
          # to the model-level belongs_to_many_required_by_default setting.
          def self.define_validations(model, reflection)
            if reflection.options.key?(:required)
              reflection.options[:optional] = !reflection.options.delete(:required)
            end

            if reflection.options[:optional].nil?
              required = model.belongs_to_many_required_by_default
            else
              required = !reflection.options[:optional]
            end

            super

            if required
              model.validates_presence_of reflection.name, message: :required
            end
          end
        end

        ::ActiveRecord::Associations::Builder.const_set(:BelongsToMany, BelongsToMany)
      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/associations/builder/has_many.rb
================================================
# frozen_string_literal: true
module Torque
  module PostgreSQL
    module Associations
      module Builder
        # Registers the extra +:array+ option on the stock +has_many+
        # builder, used to flag associations connected through a
        # PostgreSQL array column.
        module HasMany
          # Append +:array+ to the list of accepted options.
          def valid_options(options)
            super + %i[array]
          end
        end

        ::ActiveRecord::Associations::Builder::HasMany.extend(HasMany)
      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/associations/builder.rb
================================================
require_relative 'builder/belongs_to_many'
require_relative 'builder/has_many'
================================================
FILE: lib/torque/postgresql/associations/foreign_association.rb
================================================
# frozen_string_literal: true
module Torque
  module PostgreSQL
    module Associations
      # Shared behavior for associations connected through an array column,
      # prepended into both HasManyAssociation and BelongsToManyAssociation.
      module ForeignAssociation
        # There is no problem of adding temporary items on target because
        # CollectionProxy will handle memory and persisted relationship
        def inversed_from(record)
          return super unless reflection.connected_through_array?

          self.target ||= []
          self.target.push(record) unless self.target.include?(record)
          @inversed = self.target.present?
        end

        # The binds and the cache are getting mixed and caching the wrong
        # query, so skip the statement cache for array-connected reflections.
        def skip_statement_cache?(*)
          super || reflection.connected_through_array?
        end

        private

          # This is mainly for the has many when connected through an array:
          # push the owner's primary key into the record's array column that
          # backs the inverse belongs_to_many association.
          def set_owner_attributes(record)
            return super unless reflection.connected_through_array?

            add_id = owner[reflection.active_record_primary_key]
            list = record[reflection.foreign_key] ||= []
            list.push(add_id) unless list.include?(add_id)
          end
      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/associations/preloader/association.rb
================================================
# frozen_string_literal: true
module Torque
  module PostgreSQL
    module Associations
      module Preloader
        # Preloader support for associations connected through an array
        # column, where one record key may map to many owners and vice versa.
        module Association
          delegate :connected_through_array?, to: :@reflection

          # For reflections connected through an array, make sure to properly
          # decouple the list of ids and set them as associated with the owner
          def run
            return self if run?
            return super unless connected_through_array?

            @run = true
            send("run_array_for_#{@reflection.macro}")
            self
          end

          # Correctly correlate records when they are connected through an array
          def set_inverse(record)
            return super unless connected_through_array? && @reflection.macro == :has_many

            # Only the first owner is associated following the same instruction
            # on the original implementation
            convert_key(record[association_key_name])&.each do |key|
              if owners = owners_by_key[key]
                association = owners.first.association(reflection.name)
                association.set_inverse_instance(record)
              end
            end
          end

          # Requires a slight change when running on has many since the value
          # of the foreign key is an array: one record may be assigned to
          # several owners at once.
          def load_records(raw_records = nil)
            return super unless connected_through_array? && @reflection.macro == :has_many

            @records_by_owner = {}.compare_by_identity
            raw_records ||= loader_query.records_for([self])

            @preloaded_records = raw_records.select do |record|
              assignments = false

              keys = convert_key(record[association_key_name]) || []
              owners_by_key.values_at(*keys).each do |owner|
                entries = (@records_by_owner[owner] ||= [])

                if reflection.collection? || entries.empty?
                  entries << record
                  assignments = true
                end
              end

              assignments
            end
          end

          # Make sure to change the process when connected through an array:
          # each element of the owner's key array indexes that owner.
          def owners_by_key
            return super unless connected_through_array?

            @owners_by_key ||= owners.each_with_object({}) do |owner, result|
              Array.wrap(convert_key(owner[owner_key_name])).each do |key|
                (result[key] ||= []) << owner
              end
            end
          end

          private

            # Specific run for belongs_to_many association
            def run_array_for_belongs_to_many
              # Add reverse to has_many
              records = groupped_records
              owners.each do |owner|
                items = records.values_at(*Array.wrap(owner[owner_key_name]))
                associate_records_to_owner(owner, items.flatten)
              end
            end

            # Specific run for has_many association
            def run_array_for_has_many
              # Add reverse to belongs_to_many
              records = Hash.new { |h, k| h[k] = [] }
              groupped_records.each do |ids, record|
                ids.each { |id| records[id].concat(Array.wrap(record)) }
              end

              records.default_proc = nil
              owners.each do |owner|
                associate_records_to_owner(owner, records[owner[owner_key_name]] || [])
              end
            end

            # Build correctly the constraint condition in order to get the
            # associated ids
            def records_for(ids, &block)
              return super unless connected_through_array?

              condition = scope.arel_table[association_key_name]
              condition = reflection.build_id_constraint(condition, ids.flatten.uniq)
              scope.where(condition).load(&block)
            end

            # Mark the owner's association loaded and append the records
            # directly to its target.
            def associate_records_to_owner(owner, records)
              return super unless connected_through_array?

              association = owner.association(reflection.name)
              association.loaded!
              association.target.concat(records)
            end

            # Group preloaded records by their (possibly array) key value.
            def groupped_records
              preloaded_records.group_by do |record|
                convert_key(record[association_key_name])
              end
            end
        end

        ::ActiveRecord::Associations::Preloader::Association.prepend(Association)
      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/associations/preloader/loader_query.rb
================================================
# frozen_string_literal: true
module Torque
  module PostgreSQL
    module Associations
      module Preloader
        # Makes the preloader's loader query aware of association keys
        # backed by a PostgreSQL array column.
        module LoaderQuery
          # Column object for the association key on the loading side.
          def foreign_column
            @foreign_column ||= scope.columns_hash[association_key_name.to_s]
          end

          # Use an overlap condition when the key is an array column;
          # otherwise fall back to the stock loading behavior.
          def load_records_for_keys(keys, &block)
            condition = query_condition_for(keys)
            return super if condition.nil?

            scope.where(condition).load(&block)
          end

          # Build the overlaps condition for the given keys, or nil when the
          # association is not connected through an array.
          def query_condition_for(keys)
            return unless connected_through_array?

            value = scope.cast_for_condition(foreign_column, keys.to_a)
            scope.table[association_key_name].overlaps(value)
          end

          # Single (non-composite) key whose backing column is an array.
          def connected_through_array?
            !association_key_name.is_a?(Array) && foreign_column&.array?
          end
        end

        ::ActiveRecord::Associations::Preloader::Association::LoaderQuery
          .prepend(LoaderQuery)
      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/associations/preloader.rb
================================================
require_relative 'preloader/association'
require_relative 'preloader/loader_query'
================================================
FILE: lib/torque/postgresql/associations.rb
================================================
require_relative 'associations/association_scope'
require_relative 'associations/belongs_to_many_association'
require_relative 'associations/foreign_association'
require_relative 'associations/builder'
require_relative 'associations/preloader'

# Share the array-aware association behavior between the stock has_many
# association and this gem's belongs_to_many association.
association_mod = Torque::PostgreSQL::Associations::ForeignAssociation
::ActiveRecord::Associations::HasManyAssociation.prepend(association_mod)
::ActiveRecord::Associations::BelongsToManyAssociation.prepend(association_mod)
================================================
FILE: lib/torque/postgresql/attributes/builder/enum.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Attributes
module Builder
class Enum
VALID_TYPES = %i[enum enum_set].freeze
FN = '::Torque::PostgreSQL::FN'
attr_accessor :klass, :attribute, :subtype, :options, :values,
:klass_module, :instance_module
# Start a new builder of methods for enum values on ActiveRecord::Base
def initialize(klass, attribute, options)
@klass = klass
@attribute = attribute.to_s
@subtype = klass.attribute_types[@attribute]
@options = options
raise Interrupt unless subtype.respond_to?(:klass)
@values = subtype.klass.values
if @options[:only]
@values &= Array(@options[:only]).map(&:to_s)
end
if @options[:except]
@values -= Array(@options[:except]).map(&:to_s)
end
end
# Get the list of methods based on enum values
def values_methods
return @values_methods if defined?(@values_methods)
prefix = options.fetch(:prefix, nil)
suffix = options.fetch(:suffix, nil)
prefix = attribute if prefix == true
suffix = attribute if suffix == true
base = [prefix, '%s', suffix].compact.join('_')
@values_methods = begin
values.map do |val|
key = val.downcase.tr('- ', '__')
scope = base % key
ask = scope + '?'
bang = scope + '!'
[key, [scope, ask, bang, val]]
end.to_h
end
end
# Check if it's building the methods for sets
def set_features?
options[:set_features].present?
end
# Check if any of the methods that will be created get in conflict
# with the base class methods
def conflicting?
return if options[:force] == true
attributes = attribute.pluralize
dangerous?(attributes, true)
dangerous?("#{attributes}_keys", true)
dangerous?("#{attributes}_texts", true)
dangerous?("#{attributes}_options", true)
dangerous?("#{attribute}_text")
if set_features?
dangerous?("has_#{attributes}", true)
dangerous?("has_any_#{attributes}", true)
end
values_methods.each do |attr, (scope, ask, bang, *)|
dangerous?(scope, true)
dangerous?(bang)
dangerous?(ask)
end
rescue Interrupt => err
raise ArgumentError, <<-MSG.squish
Enum #{subtype.name} was not able to generate requested
methods because the method #{err} already exists in
#{klass.name}.
MSG
end
# Create all methods needed
def build
@klass_module = Module.new
@instance_module = Module.new
plural
stringify
all_values
set_scopes if set_features?
klass.extend klass_module
klass.include instance_module
end
private
# Check if the method already exists in the reference class
def dangerous?(method_name, class_method = false)
if class_method
if klass.dangerous_class_method?(method_name)
raise Interrupt, method_name.to_s
end
else
if klass.dangerous_attribute_method?(method_name)
raise Interrupt, method_name.to_s
end
end
rescue Interrupt => e
raise e if Torque::PostgreSQL.config.enum.raise_conflicting
type = class_method ? 'class method' : 'instance method'
indicator = class_method ? '.' : '#'
Torque::PostgreSQL.logger.info(<<~MSG.squish)
Creating #{class_method} :#{method_name} for enum.
Overwriting existing method #{klass.name}#{indicator}#{method_name}.
MSG
end
# Create the method that allow access to the list of values
def plural
enum_klass = subtype.klass.name
klass_module.module_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{attribute.pluralize} # def roles
::#{enum_klass}.values # Enum::Roles.values
end # end
def #{attribute.pluralize}_keys # def roles_keys
::#{enum_klass}.keys # Enum::Roles.keys
end # end
def #{attribute.pluralize}_texts # def roles_texts
::#{enum_klass}.members.map do |member| # Enum::Roles.members do |member|
member.text('#{attribute}', self) # member.text('role', self)
end # end
end # end
def #{attribute.pluralize}_options # def roles_options
#{attribute.pluralize}_texts.zip(::#{enum_klass}.values) # roles_texts.zip(Enum::Roles.values)
end # end
RUBY
end
# Create additional methods when the enum is a set, which needs
# better ways to check if values are present or not
def set_scopes
cast_type = subtype.name.chomp('[]')
klass_module.module_eval <<-RUBY, __FILE__, __LINE__ + 1
def has_#{attribute.pluralize}(*values) # def has_roles(*values)
attr = arel_table['#{attribute}'] # attr = arel_table['role']
value = #{FN}.bind_with(attr, values) # value = ::Torque::PostgreSQL::FN.bind_with(attr, values)
where(attr.contains(value.pg_cast('#{cast_type}[]'))) # where(attr.contains(value.pg_cast('roles[]')))
end # end
def has_any_#{attribute.pluralize}(*values) # def has_any_roles(*values)
attr = arel_table['#{attribute}'] # attr = arel_table['role']
value = #{FN}.bind_with(attr, values) # value = ::Torque::PostgreSQL::FN.bind_with(attr, values)
where(attr.overlaps(value.pg_cast('#{cast_type}[]'))) # where(attr.overlaps(value.pg_cast('roles[]')))
end # end
RUBY
end
# Create the method that turn the attribute value into text using
# the model scope
def stringify
instance_module.module_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{attribute}_text # def role_text
#{attribute}.text('#{attribute}', self) # role.text('role', self)
end # end
RUBY
end
# Create all the methods that represent actions related to the
# attribute value
def all_values
klass_content = ''
instance_content = ''
enum_klass = subtype.klass.name
values_methods.each do |key, (scope, ask, bang, val)|
klass_content += <<-RUBY
def #{scope} # def admin
attr = arel_table['#{attribute}'] # attr = arel_table['role']
where(::#{enum_klass}.scope(attr, '#{val}')) # where(Enum::Roles.scope(attr, 'admin'))
end # end
RUBY
instance_content += <<-RUBY
def #{ask} # def admin?
#{attribute}.#{key}? # role.admin?
end # end
def #{bang} # admin!
self.#{attribute} = '#{val}' # self.role = 'admin'
return unless #{attribute}_changed? # return unless role_changed?
return save! if Torque::PostgreSQL.config.enum.save_on_bang
true # true
end # end
RUBY
end
klass_module.module_eval(klass_content)
instance_module.module_eval(instance_content)
end
end
end
end
end
end
================================================
FILE: lib/torque/postgresql/attributes/builder/full_text_search.rb
================================================
# frozen_string_literal: true
module Torque
  module PostgreSQL
    module Attributes
      module Builder
        # Builds a full text search scope on top of a tsvector attribute,
        # supporting configurable query mode, language, rank selection, and
        # rank-based ordering.
        class FullTextSearch
          attr_accessor :klass, :attribute, :options, :klass_module,
            :default_rank, :default_mode, :default_order, :default_language

          # klass:: the model class being decorated
          # attribute:: the tsvector attribute to search against
          # options:: +:with_rank+ (true or a column alias), +:mode+,
          #           +:order+ (:asc/:desc/true), +:language+, +:prefix+/
          #           +:suffix+ for the scope name, and +:force+
          def initialize(klass, attribute, options = {})
            @klass = klass
            @attribute = attribute
            @options = options

            # +with_rank: true+ exposes the rank as "rank"; any other truthy
            # value is used as the alias itself
            @default_rank = options[:with_rank] == true ? 'rank' : options[:with_rank]&.to_s
            @default_mode = options[:mode] || PostgreSQL.config.full_text_search.default_mode
            @default_order =
              case options[:order]
              when :asc, true then :asc
              when :desc then :desc
              else false
              end

            # A String/Symbol language is taken as-is; anything else falls
            # back to the configured default (resolved at query time)
            @default_language = options[:language] if options[:language].is_a?(String) ||
              options[:language].is_a?(Symbol)
            @default_language ||= PostgreSQL.config.full_text_search.default_language.to_s
          end

          # What is the name of the scope to be added to the model
          def scope_name
            @scope_name ||= [
              options[:prefix],
              :full_text_search,
              options[:suffix],
            ].compact.join('_')
          end

          # Just check if the scope name is already defined
          def conflicting?
            return if options[:force] == true
            if klass.dangerous_class_method?(scope_name)
              raise Interrupt, scope_name.to_s
            end
          end

          # Create the proper scope
          def build
            @klass_module = Module.new
            add_scope_to_module
            klass.extend klass_module
          end

          # Creates a class method as the scope that builds the full text
          # search: resolves the language (literal, column, or model method),
          # picks the ts_query function from the mode, and optionally adds a
          # rank selection/ordering.
          def add_scope_to_module
            klass_module.module_eval <<-RUBY, __FILE__, __LINE__ + 1
              def #{scope_name}(value#{scope_args})
                attr = arel_table['#{attribute}']
                fn = ::Torque::PostgreSQL::FN
                lang = language.to_s if !language.is_a?(::Symbol)
                lang ||= arel_table[language.to_s] if has_attribute?(language)
                lang ||= public_send(language) if respond_to?(language)
                function = {
                  default: :to_tsquery,
                  phrase: :phraseto_tsquery,
                  plain: :plainto_tsquery,
                  web: :websearch_to_tsquery,
                }[mode.to_sym]
                raise ::ArgumentError, <<~MSG.squish if lang.blank?
                  Unable to determine language from \#{language.inspect}.
                MSG
                raise ::ArgumentError, <<~MSG.squish if function.nil?
                  Invalid mode \#{mode.inspect} for full text search.
                MSG
                value = fn.bind(:value, value.to_s, attr.type_caster)
                lang = fn.bind(:lang, lang, attr.type_caster) if lang.is_a?(::String)
                query = fn.public_send(function, lang, value)
                ranker = fn.ts_rank(attr, query) if rank || order
                result = where(fn.infix(:"@@", attr, query))
                result = result.order(ranker.public_send(order == :desc ? :desc : :asc)) if order
                result.select_extra_values += [ranker.as(rank == true ? 'rank' : rank.to_s)] if rank
                result
              end
            RUBY
          end

          # Returns the arguments to be used on the scope, baking the
          # builder's defaults into the generated keyword arguments
          def scope_args
            args = +''
            args << ", order: #{default_order.inspect}"
            args << ", rank: #{default_rank.inspect}"
            args << ", language: #{default_language.inspect}"
            args << ", mode: :#{default_mode}"
            args
          end
        end
      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/attributes/builder/period.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Attributes
module Builder
class Period
# Strips the attribute placeholder when building direct (unprefixed)
# method names.
DIRECT_ACCESS_REGEX = /_?%s_?/

# Range column types the period builder can handle.
SUPPORTED_TYPES = %i[daterange tsrange tstzrange].freeze

# Expression used to fetch "now" for each supported type.
CURRENT_GETTERS = {
  daterange: 'Date.current',
  tsrange: 'Time.zone.now',
  tstzrange: 'Time.zone.now',
}.freeze

# Cast applied when binding values for each supported type.
TYPE_CASTERS = {
  daterange: :date,
  tsrange: :timestamp,
  tstzrange: :timestamp,
}.freeze

FN = '::Torque::PostgreSQL::FN'

attr_accessor :klass, :attribute, :options, :type, :default, :current_getter,
  :type_caster, :threshold, :dynamic_threshold, :klass_module, :instance_module
# Start a new builder of methods for period values on
# ActiveRecord::Base
# klass:: the model class being decorated
# attribute:: the range attribute name
# options:: includes +:pessimistic+ (inverts the default flag),
#           +:threshold+, +:methods+, +:prefixed+, and +:force+
def initialize(klass, attribute, options)
  @klass = klass
  @attribute = attribute.to_s
  @options = options
  @type = klass.attribute_types[@attribute].type

  raise ArgumentError, <<-MSG.squish unless SUPPORTED_TYPES.include?(type)
    Period cannot be generated for #{attribute} because its type
    #{type} is not supported. Only #{SUPPORTED_TYPES.join(', ')} are supported.
  MSG

  @current_getter = CURRENT_GETTERS[type]
  @type_caster = TYPE_CASTERS[type]
  # Optimistic by default; :pessimistic flips the fallback behavior
  @default = options[:pessimistic].blank?
end
# Check if can identify a threshold field: +false+ disables it, an
# explicit name is used as-is (symbolized), and +true+ auto-detects
# the configured column, validating its presence and :interval type.
def threshold
  @threshold ||= begin
    option = options[:threshold]
    return if option.eql?(false)

    unless option.eql?(true)
      return option.is_a?(String) ? option.to_sym : option
    end

    attributes = klass.attribute_names
    default_name = Torque::PostgreSQL.config.period.auto_threshold.to_s
    raise ArgumentError, <<-MSG.squish unless attributes.include?(default_name)
      Unable to find the #{default_name} to use as threshold for period
      features for #{attribute} in #{klass.name} model.
    MSG

    check_type = klass.attribute_types[default_name].type
    raise ArgumentError, <<-MSG.squish unless check_type.eql?(:interval)
      The #{default_name} has the wrong type to be used as threshold.
      Expected :interval got #{check_type.inspect} in #{klass.name} model.
    MSG

    default_name.to_sym
  end
end
# Generate all the method names, letting the :methods option override
# individual defaults.
def method_names
  @method_names ||= default_method_names.merge(options.fetch(:methods, {}))
end
# Get the list of methods associated with the class (entries 00-22 as
# numbered in #build).
def klass_method_names
  @klass_method_names ||= method_names.to_a[0..22].to_h
end
# Get the list of methods associated with the instances (entries 23-29
# as numbered in #build).
def instance_method_names
  @instance_method_names ||= method_names.to_a[23..29].to_h
end
# Check if any of the methods that will be created get in conflict
# with the base class methods; raises ArgumentError naming the
# conflicting method unless :force is set.
def conflicting?
  return if options[:force] == true

  klass_method_names.values.each { |name| dangerous?(name, true) }
  instance_method_names.values.each { |name| dangerous?(name) }
rescue Interrupt => err
  # BUGFIX: the message referenced +subtype.class.name+, but this class
  # defines no +subtype+, so reporting a conflict raised NameError.
  raise ArgumentError, <<-MSG.squish
    Period for #{attribute} was not able to generate requested
    methods because the method #{err} already exists in
    #{klass.name}.
  MSG
end
# Create all methods needed. The trailing numbers match the positions
# consumed by #klass_method_names (00-22) and #instance_method_names
# (23-29), so the registration order here matters.
def build
  @klass_module = Module.new
  @instance_module = Module.new

  value_args = ['value']
  left_right_args = ['left', 'right = nil']

  ## Klass methods
  build_method_helper :klass, :current_on, value_args # 00
  build_method_helper :klass, :current # 01
  build_method_helper :klass, :not_current # 02
  build_method_helper :klass, :containing, value_args # 03
  build_method_helper :klass, :not_containing, value_args # 04
  build_method_helper :klass, :overlapping, left_right_args # 05
  build_method_helper :klass, :not_overlapping, left_right_args # 06
  build_method_helper :klass, :starting_after, value_args # 07
  build_method_helper :klass, :starting_before, value_args # 08
  build_method_helper :klass, :finishing_after, value_args # 09
  build_method_helper :klass, :finishing_before, value_args # 10

  # "real" variants take the threshold into account
  if threshold.present?
    build_method_helper :klass, :real_containing, value_args # 11
    build_method_helper :klass, :real_overlapping, left_right_args # 12
    build_method_helper :klass, :real_starting_after, value_args # 13
    build_method_helper :klass, :real_starting_before, value_args # 14
    build_method_helper :klass, :real_finishing_after, value_args # 15
    build_method_helper :klass, :real_finishing_before, value_args # 16
  end

  # Date-specific helpers only make sense for timestamp ranges
  unless type.eql?(:daterange)
    build_method_helper :klass, :containing_date, value_args # 17
    build_method_helper :klass, :not_containing_date, value_args # 18
    build_method_helper :klass, :overlapping_date, left_right_args # 19
    build_method_helper :klass, :not_overlapping_date, left_right_args # 20

    if threshold.present?
      build_method_helper :klass, :real_containing_date, value_args # 21
      build_method_helper :klass, :real_overlapping_date, left_right_args # 22
    end
  end

  ## Instance methods
  build_method_helper :instance, :current? # 23
  build_method_helper :instance, :current_on?, value_args # 24
  build_method_helper :instance, :start # 25
  build_method_helper :instance, :finish # 26

  if threshold.present?
    build_method_helper :instance, :real # 27
    build_method_helper :instance, :real_start # 28
    build_method_helper :instance, :real_finish # 29
  end

  klass.extend klass_module
  klass.include instance_module
end
# Resolve the configured name for +key+, generate its body through the
# matching +klass_*+ / +instance_*+ builder method, and eval the resulting
# definition into the klass_module or instance_module.
def build_method_helper(type, key, args = [])
  method_name = method_names[key]
  # A nil name means the method was disabled by configuration
  return if method_name.nil?

  method_content = send("#{type}_#{key}")
  method_content = define_string_method(method_name, method_content, args)

  source_module = send("#{type}_module")
  source_module.module_eval(method_content)
end
private
# Generates the default method names
# When +prefixed+ (the default), each configured name template is formatted
# with the attribute name; otherwise the direct (unprefixed) names are
# merged in and the format markers stripped out.
def default_method_names
  list = Torque::PostgreSQL.config.period.method_names.dup

  if options.fetch(:prefixed, true)
    list.transform_values { |value| format(value, attribute) }
  else
    list = list.merge(Torque::PostgreSQL.config.period.direct_method_names)
    list.transform_values { |value| value.gsub(DIRECT_ACCESS_REGEX, '') }
  end
end
# Check if the method already exists in the reference class and interrupt
# the build process when it does (returns nil when the name is safe)
def dangerous?(method_name, class_method = false)
  conflicted =
    if class_method
      klass.dangerous_class_method?(method_name)
    else
      klass.dangerous_attribute_method?(method_name)
    end

  raise Interrupt, method_name.to_s if conflicted
end
## BUILDER HELPERS

# Compose the source of a full method definition from its name, its body,
# and an optional list of argument declarations
def define_string_method(name, body, args = [])
  signature = "def #{name}(#{args.join(', ')})"
  "#{signature}\n#{body}\nend"
end
# Source snippet referencing the period attribute on the model's arel
# table (all these helpers build strings that are later eval'ed)
def arel_attribute
  @arel_attribute ||= "arel_table[#{attribute.inspect}]"
end

# Source snippet for the configured default value, bound to the attribute
def arel_default_sql
  @arel_default_sql ||= arel_sql_bind(@default.inspect)
end

# Source snippet that binds +value+ using the attribute's type
def arel_sql_bind(value)
  "#{FN}.bind_with(#{arel_attribute}, #{value})"
end
# Check how to provide the threshold value
# - Symbol/String: another column on the same table
# - ActiveSupport::Duration: a fixed interval, in seconds
# - Numeric: days for date-typed periods, seconds otherwise
def arel_threshold_value
  @arel_threshold_value ||= begin
    case threshold
    when Symbol, String
      "arel_table['#{threshold}']"
    when ActiveSupport::Duration
      value = "'#{threshold.to_i} seconds'"
      "::Arel.sql(\"#{value}\").pg_cast(:interval)"
    when Numeric
      value = threshold.to_i.to_s
      # Parentheses are required: +<<+ binds tighter than the ternary, so
      # without them this parsed as +(value << bool) ? ... : ...+ and
      # String#<< raised a TypeError on the boolean
      value << (type_caster.eql?(:date) ? ' days' : ' seconds')
      value = "'#{value}'"
      "::Arel.sql(\"#{value}\").pg_cast(:interval)"
    end
  end
end
# Start at version of the value
def arel_start_at
  @arel_start_at ||= arel_named_function('lower', arel_attribute)
end

# Finish at version of the value
def arel_finish_at
  @arel_finish_at ||= arel_named_function('upper', arel_attribute)
end

# Start at version of the value with threshold
def arel_real_start_at
  return arel_start_at unless threshold.present?
  @arel_real_start_at ||= begin
    result = +"(#{arel_start_at} - #{arel_threshold_value})"
    # Subtracting an interval changes the type, so cast back for dateranges
    result << '.pg_cast(:date)' if type.eql?(:daterange)
    result
  end
end

# Finish at version of the value with threshold
def arel_real_finish_at
  return arel_finish_at unless threshold.present?
  @arel_real_finish_at ||= begin
    result = +"(#{arel_finish_at} + #{arel_threshold_value})"
    result << '.pg_cast(:date)' if type.eql?(:daterange)
    result
  end
end

# When the time has a threshold, then the real attribute is complex:
# a range rebuilt from the threshold-adjusted bounds
def arel_real_attribute
  return arel_attribute unless threshold.present?
  @arel_real_attribute ||= arel_named_function(
    type, arel_real_start_at, arel_real_finish_at,
  )
end

# Create an arel version of the type with the following values
# (a single point when +right+ is omitted)
def arel_convert_to_type(left, right = nil, set_type = nil)
  arel_named_function(set_type || type, left, right || left)
end
# Create an arel named function
# Returns source like "FN.name(arg1, arg2)"; no parentheses without args
def arel_named_function(name, *args)
  result = +"#{FN}.#{name}"
  result << '(' << args.join(', ') << ')' if args.present?
  result
end

# Create an arel version of +nullif+ function
def arel_nullif(*args)
  arel_named_function('nullif', *args)
end

# Create an arel version of +coalesce+ function
def arel_coalesce(*args)
  arel_named_function('coalesce', *args)
end

# Create an arel version of an empty value for the range
def arel_empty_value
  arel_convert_to_type('::Arel.sql(\'NULL\')')
end
# Convert timestamp range to date range format
# The '[]' bound marker makes both edges inclusive
def arel_daterange(real = false)
  arel_named_function(
    'daterange',
    (real ? arel_real_start_at : arel_start_at) + '.pg_cast(:date)',
    (real ? arel_real_finish_at : arel_finish_at) + '.pg_cast(:date)',
    '::Arel.sql("\'[]\'")',
  )
end

# Build the condition source that checks +value+ against the (real)
# attribute, treating an empty range as NULL and falling back to the
# configured default via coalesce
def arel_check_condition(type)
  checker = arel_nullif(arel_real_attribute, arel_empty_value)
  checker << ".#{type}(value.pg_cast(#{type_caster.inspect}))"
  arel_coalesce(checker, arel_default_sql)
end
# Emit source lines that normalize +value+ before the condition runs:
# a Symbol becomes a column reference, and anything not already an arel
# node is bound (and optionally cast) against the attribute
def arel_formatting_value(condition = nil, value = 'value', cast: nil)
  [
    "#{value} = arel_table[#{value}] if #{value}.is_a?(Symbol)",
    "unless #{value}.respond_to?(:pg_cast)",
    " #{value} = #{FN}.bind_with(#{arel_attribute}, #{value})",
    (" #{value} = #{value}.pg_cast(#{cast.inspect})" if cast),
    'end',
    condition,
  ].compact.join("\n")
end

# Same normalization for a left/right pair: when +right+ is given the two
# are combined into a range of +set_type+ (or the period's own type)
def arel_formatting_left_right(condition, set_type = nil, cast: nil)
  [
    arel_formatting_value(nil, 'left', cast: cast),
    '',
    'if right.present?',
    ' ' + arel_formatting_value(nil, 'right', cast: cast),
    " value = #{arel_convert_to_type('left', 'right', set_type)}",
    'else',
    ' value = left',
    'end',
    '',
    condition,
  ].join("\n")
end
## METHOD BUILDERS
#
# Each +klass_*+ method returns the body (as source) of a scope added to
# the model; the +value+ / +left+ / +right+ locals referenced inside are
# set up by the arel_formatting_* helpers.

# Scope: periods containing a given point in time
def klass_current_on
  arel_formatting_value("where(#{arel_check_condition(:contains)})")
end

# Scope: periods containing the current time
def klass_current
  [
    "value = #{arel_sql_bind(current_getter)}",
    "where(#{arel_check_condition(:contains)})",
  ].join("\n")
end

# Scope: periods not containing the current time
def klass_not_current
  [
    "value = #{arel_sql_bind(current_getter)}",
    "where.not(#{arel_check_condition(:contains)})",
  ].join("\n")
end

# Scope: periods containing the given value
def klass_containing
  arel_formatting_value("where(#{arel_attribute}.contains(value))")
end

# Scope: periods not containing the given value
def klass_not_containing
  arel_formatting_value("where.not(#{arel_attribute}.contains(value))")
end

# Scope: periods overlapping the given range (or left/right pair)
def klass_overlapping
  arel_formatting_left_right("where(#{arel_attribute}.overlaps(value))")
end

# Scope: periods not overlapping the given range
def klass_not_overlapping
  arel_formatting_left_right("where.not(#{arel_attribute}.overlaps(value))")
end

# Scope: periods starting after the given value
def klass_starting_after
  arel_formatting_value("where((#{arel_start_at}).gt(value))")
end

# Scope: periods starting before the given value
def klass_starting_before
  arel_formatting_value("where((#{arel_start_at}).lt(value))")
end

# Scope: periods finishing after the given value
def klass_finishing_after
  arel_formatting_value("where((#{arel_finish_at}).gt(value))")
end

# Scope: periods finishing before the given value
def klass_finishing_before
  arel_formatting_value("where((#{arel_finish_at}).lt(value))")
end

# The real_* variants below apply the configured threshold to the bounds
# (only generated when a threshold is present)

def klass_real_containing
  arel_formatting_value("where(#{arel_real_attribute}.contains(value))")
end

def klass_real_overlapping
  arel_formatting_left_right("where(#{arel_real_attribute}.overlaps(value))")
end

def klass_real_starting_after
  arel_formatting_value("where(#{arel_real_start_at}.gt(value))")
end

def klass_real_starting_before
  arel_formatting_value("where(#{arel_real_start_at}.lt(value))")
end

def klass_real_finishing_after
  arel_formatting_value("where(#{arel_real_finish_at}.gt(value))")
end

def klass_real_finishing_before
  arel_formatting_value("where(#{arel_real_finish_at}.lt(value))")
end
# The *_date variants cast both the column and the value to a daterange;
# only generated when the column is not already a daterange

def klass_containing_date
  arel_formatting_value("where(#{arel_daterange}.contains(value))",
    cast: :date)
end

def klass_not_containing_date
  arel_formatting_value("where.not(#{arel_daterange}.contains(value))",
    cast: :date)
end

def klass_overlapping_date
  arel_formatting_left_right("where(#{arel_daterange}.overlaps(value))",
    :daterange, cast: :date)
end

def klass_not_overlapping_date
  arel_formatting_left_right("where.not(#{arel_daterange}.overlaps(value))",
    :daterange, cast: :date)
end

# Threshold-aware date variants (arel_daterange(true) uses the real bounds)

def klass_real_containing_date
  arel_formatting_value("where(#{arel_daterange(true)}.contains(value))",
    cast: :date)
end

def klass_real_overlapping_date
  arel_formatting_left_right("where(#{arel_daterange(true)}.overlaps(value))",
    :daterange, cast: :date)
end
# Instance helper: whether the record's period covers the current time
def instance_current?
  "#{method_names[:current_on?]}(#{current_getter})"
end

# Instance helper: whether the period covers the given value, falling back
# to the configured default when the attribute is nil. The lower bound is
# inclusive and the upper exclusive (<= / >)
def instance_current_on?
  attr_value = threshold.present? ? method_names[:real] : attribute
  default_value = default.inspect
  [
    "return #{default_value} if #{attr_value}.nil?",
    "(#{attr_value}.min.try(:infinite?) || #{attr_value}.min <= value) &&",
    " (#{attr_value}.max.try(:infinite?) || #{attr_value}.max > value)",
  ].join("\n")
end

# Instance helper: lower bound of the period
def instance_start
  "#{attribute}&.min"
end

# Instance helper: upper bound of the period
def instance_finish
  "#{attribute}&.max"
end

# Instance helper: the period expanded by the threshold on both sides,
# substituting infinity for a missing bound
def instance_real
  left = method_names[:real_start]
  right = method_names[:real_finish]
  [
    "left = #{left}",
    "right = #{right}",
    'return unless left || right',
    '((left || -::Float::INFINITY)..(right || ::Float::INFINITY))',
  ].join("\n")
end

# Instance helper: lower bound minus the threshold (column or duration)
def instance_real_start
  suffix = type.eql?(:daterange) ? '.to_date' : ''
  threshold_value = threshold.is_a?(Symbol) \
    ? threshold.to_s \
    : threshold.to_i.to_s + '.seconds'
  [
    "return if #{method_names[:start]}.nil?",
    "value = #{method_names[:start]}",
    "value -= (#{threshold_value} || 0)",
    "value#{suffix}"
  ].join("\n")
end

# Instance helper: upper bound plus the threshold (column or duration)
def instance_real_finish
  suffix = type.eql?(:daterange) ? '.to_date' : ''
  threshold_value = threshold.is_a?(Symbol) \
    ? threshold.to_s \
    : threshold.to_i.to_s + '.seconds'
  [
    "return if #{method_names[:finish]}.nil?",
    "value = #{method_names[:finish]}",
    "value += (#{threshold_value} || 0)",
    "value#{suffix}"
  ].join("\n")
end
end
end
end
end
end
================================================
FILE: lib/torque/postgresql/attributes/builder.rb
================================================
# frozen_string_literal: true
require_relative 'builder/enum'
require_relative 'builder/period'
require_relative 'builder/full_text_search'
module Torque
module PostgreSQL
module Attributes
module Builder
  # Define a class-level macro +method_name+ on +klass+ that, for each
  # attribute it receives, instantiates +builder_klass+, checks for name
  # conflicts, and builds the attribute helpers
  def self.include_on(klass, method_name, builder_klass, **extra, &block)
    klass.define_singleton_method(method_name) do |*args, **options|
      # Nothing to build when the table is not available yet
      return unless table_exists?
      args.each do |attribute|
        # Generate methods on self class
        builder = builder_klass.new(self, attribute, extra.merge(options))
        builder.conflicting?
        builder.build

        # Additional settings for the builder
        instance_exec(builder, &block) if block.present?
      rescue Interrupt
        # Not able to build the attribute, maybe pending migrations
      end
    end
  end

  # Translate a full text search column definition into column options:
  # tsvector type, generated-column expression, and optional index
  def self.search_vector_options(columns:, language: nil, stored: true, **options)
    weights = to_search_weights(columns)
    operation = to_search_vector_operation(language, weights).to_sql
    options[:index] = {
      using: PostgreSQL.config.full_text_search.default_index_type,
    } if options[:index] == true
    options.merge(type: :tsvector, as: operation, stored: stored)
  end

  # Normalize the searchable columns into a { column => weight } hash,
  # assigning A, B, C, then D for every remaining column
  # NOTE(review): for a bare Symbol, +columns.size+ is its character
  # length — harmless since only the first zipped weight is used, but
  # worth confirming the intended input shapes
  def self.to_search_weights(columns)
    if !columns.is_a?(Hash)
      extras = columns.size > 3 ? columns.size - 3 : 0
      weights = %w[A B C] + (['D'] * extras)
      columns = Array.wrap(columns).zip(weights).to_h
    end
    columns.transform_keys(&:to_s)
  end

  # Build the to_tsvector/setweight expression concatenating all weighted
  # columns; weights are skipped when there is a single column
  def self.to_search_vector_operation(language, weights)
    language ||= PostgreSQL.config.full_text_search.default_language
    language = ::Arel.sql(language.is_a?(Symbol) ? language.to_s : "'#{language}'")
    simple = weights.size == 1
    empty_string = ::Arel.sql("''")
    operations = weights.map do |column, weight|
      column = ::Arel.sql(column.to_s)
      weight = ::Arel.sql("'#{weight}'")
      op = FN.to_tsvector(language, FN.coalesce(column, empty_string))
      op = FN.setweight(op, weight) unless simple
      op
    end
    FN.concat(*operations)
  end
end
end
end
end
================================================
FILE: lib/torque/postgresql/attributes/enum.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Attributes
# String subclass representing a single value of a PostgreSQL enum type.
# Instances compare by their index in the enum's value list.
class Enum < String
  include Comparable

  class EnumError < ArgumentError; end

  # Sentinel used by Lazy to flag a not-yet-resolved value
  LAZY_VALUE = 0.chr

  class << self
    include Enumerable

    delegate :each, :sample, :size, :length, to: :members

    # Find or create the class that will handle the value
    def lookup(name)
      const = name.to_s.camelize
      namespace = PostgreSQL.config.enum.namespace
      return namespace.const_get(const) if namespace.const_defined?(const)
      namespace.const_set(const, Class.new(Enum))
    end

    # Provide a method on the given class to setup which enums will be
    # manually initialized
    def include_on(klass, method_name = nil)
      method_name ||= PostgreSQL.config.enum.base_method
      Builder.include_on(klass, method_name, Builder::Enum) do |builder|
        defined_enums[builder.attribute.to_s] = builder.subtype.klass
      end
    end

    # Overpass new so blank values return only nil
    def new(value)
      return Lazy.new(self, LAZY_VALUE) if value.blank?
      super
    end

    # Load the list of values in a lazy way (the abstract Enum class
    # itself has no values)
    def values
      @values ||= self == Enum ? nil : begin
        connection.enum_values(type_name).freeze
      end
    end

    # List of values as symbols
    def keys
      values.map(&:to_sym)
    end

    # Different from values, it returns the list of items already casted
    def members
      values.map(&method(:new))
    end

    # Get the list of the values translated by I18n
    def texts
      members.map(&:text)
    end

    # Get a list of values translated and ready for select
    def to_options
      texts.zip(values)
    end

    # Fetch a value from the list
    # see https://github.com/rails/rails/blob/v5.0.0/activerecord/lib/active_record/fixtures.rb#L656
    # see https://github.com/rails/rails/blob/v5.0.0/activerecord/lib/active_record/validations/uniqueness.rb#L101
    def fetch(value, *)
      new(value.to_s) if values.include?(value)
    end
    alias [] fetch

    # Get the type name from its class name
    def type_name
      @type_name ||= self.name.demodulize.underscore
    end

    # Check if the value is valid
    def valid?(value)
      return false if self == Enum
      return true if value.equal?(LAZY_VALUE)
      self.values.include?(value.to_s)
    end

    # Build an active record scope for a given attribute against a value
    def scope(attribute, value)
      attribute.eq(value)
    end

    private

      # Allows checking value existence
      def respond_to_missing?(method_name, include_private = false)
        valid?(method_name) || super
      end

      # Allow fast creation of values
      def method_missing(method_name, *arguments)
        return super if self == Enum
        valid?(method_name) ? new(method_name.to_s) : super
      end

      # Get a connection based on its name
      def connection
        ::ActiveRecord::Base.connection
      end
  end

  # Override string initializer to check for a valid value
  # Numeric input is treated as an index into the values list
  def initialize(value)
    str_value = value.is_a?(Numeric) ? self.class.values[value.to_i] : value.to_s
    raise_invalid(value) unless self.class.valid?(str_value)
    super(str_value)
  end

  # Allow comparison between values of the same enum
  def <=>(other)
    raise_comparison(other) if other.is_a?(Enum) && other.class != self.class

    case other
    when Numeric, Enum then to_i <=> other.to_i
    when String, Symbol then to_i <=> self.class.values.index(other.to_s)
    else raise_comparison(other)
    end
  end

  # Only allow value comparison with values of the same class
  def ==(other)
    (self <=> other) == 0
  rescue EnumError
    false
  end
  alias eql? ==

  # Since it can have a lazy value, nil can be true here
  def nil?
    self == LAZY_VALUE
  end
  alias empty? nil?

  # It only accepts if the other value is valid
  def replace(value)
    raise_invalid(value) unless self.class.valid?(value)
    super
  end

  # Get a translated version of the value
  def text(attr = nil, model = nil)
    keys = i18n_keys(attr, model) << self.underscore.humanize
    ::I18n.translate(keys.shift, default: keys)
  end

  # Change the string result for lazy value
  def to_s
    nil? ? '' : super
  end

  # Get the index of the value
  def to_i
    self.class.values.index(self)
  end

  # Change the inspection to show the enum name
  def inspect
    nil? ? 'nil' : ":#{to_s}"
  end

  private

    # Get the i18n keys to check
    def i18n_keys(attr = nil, model = nil)
      values = { type: self.class.type_name, value: to_s }
      list_from = :i18n_type_scopes

      if attr && model
        values[:attr] = attr
        values[:model] = model.model_name.i18n_key
        list_from = :i18n_scopes
      end

      PostgreSQL.config.enum.send(list_from).map do |key|
        (key % values).to_sym
      end
    end

    # Check for valid '?' and '!' methods
    def respond_to_missing?(method_name, include_private = false)
      name = method_name.to_s
      return true if name.chomp!('?')
      name.chomp!('!') && self.class.valid?(name)
    end

    # Allow '_' to be associated to '-'
    def method_missing(method_name, *arguments)
      name = method_name.to_s
      if name.chomp!('?')
        self == name
      elsif name.chomp!('!')
        replace(name) unless self == name
      else
        super
      end
    end

    # Throw an exception for invalid values
    def raise_invalid(value)
      if value.is_a?(Numeric)
        raise EnumError, "#{value.inspect} is out of bounds of #{self.class.name}"
      else
        raise EnumError, "#{value.inspect} is not valid for #{self.class.name}"
      end
    end

    # Throw an exception for comparison between different enums
    # Fixed: report the offending +other+ operand (previously the message
    # interpolated +self.inspect+, leaving the +other+ argument unused and
    # hiding what the value was actually compared against)
    def raise_comparison(other)
      raise EnumError, "Comparison of #{self.class.name} with #{other.inspect} failed"
    end
end
end
end
end
================================================
FILE: lib/torque/postgresql/attributes/enum_set.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Attributes
# Set subclass representing a PostgreSQL enum array attribute, where each
# member is an instance of the companion Enum class (EnumSource) and the
# whole set can be expressed as a bitmask ("power") integer.
class EnumSet < Set
  include Comparable

  class EnumSetError < Enum::EnumError; end

  class << self
    include Enumerable

    delegate :each, to: :members
    delegate :values, :keys, :members, :texts, :to_options, :valid?, :size,
      :length, :connection_specification_name, to: :enum_source

    # Find or create the class that will handle the value
    def lookup(name, enum_klass)
      const = name.to_s.camelize + 'Set'
      namespace = PostgreSQL.config.enum.namespace
      return namespace.const_get(const) if namespace.const_defined?(const)

      klass = Class.new(EnumSet)
      klass.const_set('EnumSource', enum_klass)
      namespace.const_set(const, klass)
    end

    # Provide a method on the given class to setup which enum sets will be
    # manually initialized
    def include_on(klass, method_name = nil)
      method_name ||= PostgreSQL.config.enum.set_method
      Builder.include_on(klass, method_name, Builder::Enum, set_features: true) do |builder|
        defined_enums[builder.attribute.to_s] = builder.subtype
      end
    end

    # The original Enum implementation, for individual values
    def enum_source
      const_get('EnumSource')
    end

    # Use the power to get a sample of the value
    def sample
      new(rand(0..((2 ** size) - 1)))
    end

    # Overpass new so blank values return only nil
    def new(*values)
      return Lazy.new(self, []) if values.compact.blank?
      super
    end

    # Get the type name from its class name
    def type_name
      @type_name ||= enum_source.type_name + '[]'
    end

    # Fetch a value from the list
    # see https://github.com/rails/rails/blob/v5.0.0/activerecord/lib/active_record/fixtures.rb#L656
    # see https://github.com/rails/rails/blob/v5.0.0/activerecord/lib/active_record/validations/uniqueness.rb#L101
    def fetch(value, *)
      new(value.to_s) if values.include?(value)
    end
    alias [] fetch

    # Get the power, 2 ** index, of each element
    def power(*values)
      values.flatten.map do |item|
        item = item.to_i if item.is_a?(Enum)
        # Fixed: look the item up in the enum's value list (self.values);
        # the bare +values+ here is the shadowing method argument, which
        # returned the item's position in the call instead of its position
        # in the enum
        item = self.values.index(item) unless item.is_a?(Numeric)

        next 0 if item.nil? || item >= size
        2 ** item
      end.reduce(:+)
    end

    # Build an active record scope for a given attribute against a value
    def scope(attribute, value)
      attribute.contains(FN.bind_with(attribute, value).pg_cast(type_name))
    end

    private

      # Allows checking value existence
      def respond_to_missing?(method_name, include_private = false)
        valid?(method_name) || super
      end

      # Allow fast creation of values
      # Fixed: guard the abstract EnumSet base class (was +self == Enum+,
      # which can never be true in this hierarchy, leaving the base class
      # unguarded against value creation)
      def method_missing(method_name, *arguments)
        return super if self == EnumSet
        valid?(method_name) ? new(method_name.to_s) : super
      end
  end

  # Override string initializer to check for a valid value
  # Accepts either a single numeric bitmask or a list of values
  def initialize(*values)
    items =
      if values.size === 1 && values.first.is_a?(Numeric)
        transform_power(values.first)
      else
        transform_values(values)
      end

    @hash = items.zip(Array.new(items.size, true)).to_h
  end

  # Allow comparison between values of the same enum
  def <=>(other)
    raise_comparison(other) if other.is_a?(EnumSet) && other.class != self.class
    to_i <=>
      case other
      when Numeric, EnumSet then other.to_i
      when String, Symbol then self.class.power(other.to_s)
      when Array, Set then self.class.power(*other)
      else raise_comparison(other)
      end
  end

  # Only allow value comparison with values of the same class
  def ==(other)
    (self <=> other) == 0
  rescue EnumSetError
    false
  end
  alias eql? ==

  # It only accepts if the other value is valid
  def replace(*values)
    super(transform_values(values))
  end

  # Get a translated version of the value
  def text(attr = nil, model = nil)
    map { |item| item.text(attr, model) }.to_sentence
  end
  alias to_s text

  # Get the index of the value
  def to_i
    self.class.power(@hash.keys)
  end

  # Change the inspection to show the enum name
  def inspect
    "[#{map(&:inspect).join(', ')}]"
  end

  # Replace the setter by instantiating the value
  def []=(key, value)
    super(key, instantiate(value))
  end

  # Override the merge method to ensure formatted values
  def merge(other)
    super other.map(&method(:instantiate))
  end

  # Override bitwise & operator to ensure formatted values
  def &(other)
    other = other.entries.map(&method(:instantiate))
    values = @hash.keys.select { |k| other.include?(k) }
    self.class.new(values)
  end

  # Operations that requires the other values to be transformed as well
  %i[add delete include? subtract].each do |method_name|
    define_method(method_name) do |other|
      other =
        if other.is_a?(self.class)
          other
        elsif other.is_a?(::Enumerable)
          other.map(&method(:instantiate))
        else
          instantiate(other)
        end
      super(other)
    end
  end

  private

    # Create a new enum instance of the value
    def instantiate(value)
      value.is_a?(self.class.enum_source) ? value : self.class.enum_source.new(value)
    end

    # Turn a binary (power) definition into real values
    def transform_power(value)
      list = value.to_s(2).reverse.chars.map.with_index do |item, idx|
        next idx if item.eql?('1')
      end

      # Fixed: +raise_invalid+ already raises, so the extra +raise+ around
      # it was dead code
      raise_invalid(value) if list.size > self.class.size
      self.class.members.values_at(*list.compact)
    end

    # Turn all the values into their respective Enum representations
    def transform_values(values)
      values = values.first if values.size.eql?(1) && values.first.is_a?(::Enumerable)
      values.map(&method(:instantiate)).reject(&:nil?)
    end

    # Check for valid '?' and '!' methods
    def respond_to_missing?(method_name, include_private = false)
      name = method_name.to_s
      return true if name.chomp!('?')
      name.chomp!('!') && self.class.valid?(name)
    end

    # Allow '_' to be associated to '-'
    def method_missing(method_name, *arguments)
      name = method_name.to_s
      if name.chomp!('?')
        include?(name)
      elsif name.chomp!('!')
        add(name) unless include?(name)
      else
        super
      end
    end

    # Throw an exception for invalid values
    def raise_invalid(value)
      if value.is_a?(Numeric)
        raise EnumSetError, "#{value.inspect} is out of bounds of #{self.class.name}"
      else
        raise EnumSetError, "#{value.inspect} is not valid for #{self.class.name}"
      end
    end

    # Throw an exception for comparison between different enums
    # Fixed: report the offending +other+ operand (the +other+ argument
    # was previously unused and the message showed +self+ twice)
    def raise_comparison(other)
      raise EnumSetError, "Comparison of #{self.class.name} with #{other.inspect} failed"
    end
end
end
end
end
================================================
FILE: lib/torque/postgresql/attributes/full_text_search.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Attributes
# For now, full text search doesn't have it's own class
# For now, full text search doesn't have its own class
module FullTextSearch
  class << self
    # Provide a method on the given class to setup which full text search
    # columns will be manually initialized
    def include_on(klass, method_name = nil)
      Builder.include_on(
        klass,
        method_name || PostgreSQL.config.full_text_search.base_method,
        Builder::FullTextSearch,
      )
    end
  end
end
end
end
end
================================================
FILE: lib/torque/postgresql/attributes/lazy.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Attributes
class Lazy < BasicObject
def initialize(klass, *values)
@klass, @values = klass, values
end
def ==(other)
other.nil?
end
def nil?
true
end
def inspect
'nil'
end
def __class__
Lazy
end
def method_missing(name, *args, &block)
@klass.new(*@values).send(name, *args, &block)
end
end
end
end
end
================================================
FILE: lib/torque/postgresql/attributes/period.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Attributes
# For now, period doesn't have it's own class
# For now, period doesn't have its own class
module Period
  class << self
    # Provide a method on the given class to setup which period columns
    # will be manually initialized
    def include_on(klass, method_name = nil)
      Builder.include_on(
        klass,
        method_name || PostgreSQL.config.period.base_method,
        Builder::Period,
      )
    end
  end
end
end
end
end
================================================
FILE: lib/torque/postgresql/attributes.rb
================================================
require_relative 'attributes/lazy'
require_relative 'attributes/builder'
================================================
FILE: lib/torque/postgresql/autosave_association.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module AutosaveAssociation
  module ClassMethods
    # Since belongs to many is a collection, the callback would normally go
    # to +after_create+. However, since it is a +belongs_to+ kind of
    # association, it needs to be executed +before_save+
    def add_autosave_association_callbacks(reflection)
      return super unless reflection.macro.eql?(:belongs_to_many)

      save_method = :"autosave_associated_records_for_#{reflection.name}"
      define_non_cyclic_method(save_method) do
        save_belongs_to_many_association(reflection)
      end

      before_save(save_method)
      define_autosave_validation_callbacks(reflection)
    end
  end

  # Ensure the right way to execute +save_collection_association+ and also
  # keep it as a single change using +build_changes+
  def save_belongs_to_many_association(reflection)
    # Preserve the flag so nested saves still see a consistent state
    previously_new_record_before_save = (@new_record_before_save ||= false)
    @new_record_before_save = new_record?

    association = association_instance_get(reflection.name)
    association&.build_changes { save_collection_association(reflection) }
  rescue ::ActiveRecord::RecordInvalid
    # Abort the whole save when the associated records are invalid
    throw(:abort)
  ensure
    @new_record_before_save = previously_new_record_before_save
  end
end
# Hook the custom callback registration and the save helper into all models
::ActiveRecord::Base.singleton_class.prepend(AutosaveAssociation::ClassMethods)
::ActiveRecord::Base.include(AutosaveAssociation)
end
end
================================================
FILE: lib/torque/postgresql/auxiliary_statement/recursive.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
class AuxiliaryStatement
# Auxiliary statement variant that builds a WITH RECURSIVE CTE: a base
# query unioned with a sub query that joins back on the CTE table, with
# optional depth and path tracking columns.
class Recursive < AuxiliaryStatement
  # Setup any additional option in the recursive mode
  def initialize(*, **options)
    super
    @connect = options[:connect]&.to_a&.first
    @union_all = options[:union_all]
    @sub_query = options[:sub_query]

    if options.key?(:with_depth)
      @depth = options[:with_depth].values_at(:name, :start, :as)
      @depth[0] ||= 'depth'
    end

    if options.key?(:with_path)
      @path = options[:with_path].values_at(:name, :source, :as)
      @path[0] ||= 'path'
    end
  end

  private

    # Build the string or arel query
    def build_query(base)
      # Expose columns and get the list of the ones for select
      columns = expose_columns(base, @query.try(:arel_table))
      sub_columns = columns.dup
      type = @union_all.present? ? 'all' : ''

      # Build any extra columns that are dynamic and from the recursion
      extra_columns(base, columns, sub_columns)

      # Prepare the query depending on its type
      if @query.is_a?(String) && @sub_query.is_a?(String)
        # Fixed: +each_with_object+ yields (element, memo) — the previous
        # block signature |h, (k, v)| had them swapped, destructuring the
        # memo hash instead of the argument pairs
        args = @args.each_with_object({}) { |(k, v), h| h[k] = base.connection.quote(v) }
        ::Arel.sql("(#{@query} UNION #{type.upcase} #{@sub_query})" % args)
      elsif relation_query?(@query)
        @query = @query.where(@where) if @where.present?
        @bound_attributes.concat(@query.send(:bound_attributes))

        if relation_query?(@sub_query)
          @bound_attributes.concat(@sub_query.send(:bound_attributes))

          sub_query = @sub_query.select(*sub_columns).arel
          sub_query.from([@sub_query.arel_table, table])
        else
          sub_query = ::Arel.sql(@sub_query)
        end

        @query.select(*columns).arel.union(type, sub_query)
      else
        raise ArgumentError, <<-MSG.squish
          Only String and ActiveRecord::Base objects are accepted as query and sub query
          objects, #{@query.class.name} given for #{self.class.name}.
        MSG
      end
    end

    # Setup the statement using the class configuration
    def prepare(base, settings)
      super
      prepare_sub_query(base, settings)
    end

    # Make sure that both parts of the union are ready
    def prepare_sub_query(base, settings)
      @union_all = settings.union_all if @union_all.nil?
      @sub_query ||= settings.sub_query
      @depth ||= settings.depth
      @path ||= settings.path

      # Collect the connection; default to primary_key/parent_primary_key
      @connect ||= settings.connect || begin
        key = base.primary_key
        [key.to_sym, :"parent_#{key}"] unless key.nil?
      end

      raise ArgumentError, <<-MSG.squish if @sub_query.nil? && @query.is_a?(String)
        Unable to generate sub query from a string query. Please provide a `sub_query`
        property on the "#{table_name}" settings.
      MSG

      if @sub_query.nil?
        raise ArgumentError, <<-MSG.squish if @connect.blank?
          Unable to generate sub query without setting up a proper way to connect it
          with the main query. Please provide a `connect` property on the "#{table_name}"
          settings.
        MSG

        left, right = @connect.map(&:to_s)
        condition = @query.arel_table[right].eq(table[left])

        if @query.where_values_hash.key?(right)
          @sub_query = @query.unscope(where: right.to_sym).where(condition)
        else
          @sub_query = @query.where(condition)
          @query = @query.where(right => nil)
        end
      elsif @sub_query.respond_to?(:call)
        # Call a proc to get the real sub query
        call_args = @sub_query.try(:arity) === 0 ? [] : [OpenStruct.new(@args)]
        @sub_query = @sub_query.call(*call_args)
      end
    end

    # Add depth and path if they were defined in settings
    def extra_columns(base, columns, sub_columns)
      return if @query.is_a?(String) || @sub_query.is_a?(String)

      # Add the connect attribute to the query
      # NOTE(review): @connect is always assigned in initialize, so this
      # +defined?+ guard is always true here; it does not protect against
      # a nil @connect — confirm whether a presence check was intended
      if defined?(@connect)
        columns.unshift(@query.arel_table[@connect[0]])
        sub_columns.unshift(@sub_query.arel_table[@connect[0]])
      end

      # Build a column to represent the depth of the recursion
      if @depth.present?
        name, start, as = @depth
        col = table[name]
        base.select_extra_values += [col.as(as)] unless as.nil?

        columns << ::Arel.sql(start.to_s).as(name)
        sub_columns << (col + ::Arel.sql('1')).as(name)
      end

      # Build a column to represent the path of the record access
      if @path.present?
        name, source, as = @path
        source = @query.arel_table[source || @connect[0]]

        col = table[name]
        base.select_extra_values += [col.as(as)] unless as.nil?
        parts = [col, source.pg_cast(:varchar)]

        columns << ::Arel.array([source]).pg_cast(:varchar, true).as(name)
        sub_columns << FN.array_append(*parts).as(name)
      end
    end
end
end
end
end
================================================
FILE: lib/torque/postgresql/auxiliary_statement/settings.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
class AuxiliaryStatement
class Settings < Collector.new(:attributes, :join, :join_type, :query, :requires,
:polymorphic, :through, :union_all, :connect)
attr_reader :base, :source, :depth, :path
alias_method :select, :attributes
alias_method :cte, :source
delegate :relation_query?, to: Torque::PostgreSQL::AuxiliaryStatement
delegate :table, :table_name, to: :@source
delegate :sql, to: ::Arel
def initialize(base, source, recursive = false)
@base = base
@source = source
@recursive = recursive
end
def base_name
@base.name
end
def base_table
@base.arel_table
end
def recursive?
@recursive
end
def depth?
defined?(@depth)
end
def path?
defined?(@path)
end
# Add an attribute to the result showing the depth of each iteration
def with_depth(name = 'depth', start: 0, as: nil)
@depth = [name.to_s, start, as&.to_s] if recursive?
end
# Add an attribute to the result showing the path of each record
def with_path(name = 'path', source: nil, as: nil)
@path = [name.to_s, source&.to_s, as&.to_s] if recursive?
end
# Set recursive operation to use union all
def union_all!
@union_all = true if recursive?
end
# Add both depth and path to the result
def with_depth_and_path
with_depth && with_path
end
# Get the arel version of the table set on the query
def query_table
raise StandardError, 'The query is not defined yet' if query.nil?
return query.arel_table if relation_query?(query)
@query_table
end
# Grant an easy access to arel table columns
def col(name)
query_table[name.to_s]
end
alias column col
# There are three ways of setting the query:
# - A simple relation based on a Model
# - A Arel-based select manager
# - A string or a proc
def query(value = nil, command = nil)
return @query if value.nil?
@query = sanitize_query(value, command)
end
# Same as query, but for the second part of the union for recursive cte
def sub_query(value = nil, command = nil)
return unless recursive?
return @sub_query if value.nil?
@sub_query = sanitize_query(value, command)
end
# Assume `parent_` as the other part if provided a Symbol or String.
# A Hash takes its first pair; anything else is stored as given.
# Without arguments, acts as a reader for the stored connection
def connect(value = nil)
  return @connect if value.nil?

  @connect =
    case value
    when String, Symbol then [value.to_sym, :"parent_#{value}"]
    when Hash then value.to_a.first
    else value
    end
end

alias connect= connect
private
# Get the query and table from the params. Relations and Arel select
# managers pass straight through; otherwise only strings and callables
# are accepted as the command
def sanitize_query(value, command = nil)
  return value if relation_query?(value)
  return value if value.is_a?(::Arel::SelectManager)

  command = value if command.nil? # For compatibility purposes

  unless command.respond_to?(:call) || command.is_a?(String)
    raise ArgumentError, <<-MSG.squish
      Only relation, string and proc are valid object types for query,
      #{command.inspect} given.
    MSG
  end

  command
end
end
end
end
end
================================================
FILE: lib/torque/postgresql/auxiliary_statement.rb
================================================
# frozen_string_literal: true
require_relative 'auxiliary_statement/settings'
require_relative 'auxiliary_statement/recursive'
module Torque
module PostgreSQL
class AuxiliaryStatement
  # Matches strings like 'table.column' or '"table"."column"', capturing
  # the table part (1) and the column part (2)
  TABLE_COLUMN_AS_STRING = /\A(?:"?(\w+)"?\.)?"?(\w+)"?\z/.freeze

  class << self
    attr_reader :config, :table_name

    # Find or create the class that will handle statement
    def lookup(name, base)
      const = name.to_s.camelize << '_' << self.name.demodulize
      return base.const_get(const, false) if base.const_defined?(const, false)

      base.const_set(const, Class.new(self)).tap do |klass|
        klass.instance_variable_set(:@table_name, name.to_s)
      end
    end

    # Create a new instance of an auxiliary statement, looking up the
    # statement class through the model's inheritance chain
    def instantiate(statement, base, **options)
      model = base
      klass = while model < ActiveRecord::Base
        list = model.auxiliary_statements_list
        break list[statement] if list.present? && list.key?(statement)

        model = model.superclass
      end

      return klass.new(**options) unless klass.nil?

      # Report the model that was originally provided. Interpolating
      # +base.class.name+ here would always print "Class", since +base+ is
      # itself a class (and it was also mutated by the walk above)
      raise ArgumentError, <<-MSG.squish
        There's no '#{statement}' auxiliary statement defined for #{base.name}.
      MSG
    end

    # Fast access to statement build. Collects the bound attributes and
    # join sources of the built statement into the given accumulators
    def build(statement, base, bound_attributes = [], join_sources = [], **options)
      klass = instantiate(statement, base, **options)
      result = klass.build(base)

      bound_attributes.concat(klass.bound_attributes)
      join_sources.concat(klass.join_sources)

      result
    end

    # Identify if the query set may be used as a relation
    def relation_query?(obj)
      !obj.nil? && obj.respond_to?(:ancestors) && \
        obj.ancestors.include?(ActiveRecord::Base)
    end

    # Identify if the query set may be used as arel
    def arel_query?(obj)
      !obj.nil? && obj.is_a?(::Arel::SelectManager)
    end

    # A way to create auxiliary statements outside of models configurations,
    # being able to use on extensions
    def create(table_or_settings, &block)
      klass = Class.new(self)

      if block_given?
        klass.instance_variable_set(:@table_name, table_or_settings)
        klass.configurator(block)
      elsif relation_query?(table_or_settings)
        klass.configurator(query: table_or_settings)
      else
        klass.configurator(table_or_settings)
      end

      klass
    end

    # Set a configuration block or static hash
    def configurator(config)
      if config.is_a?(Hash)
        # Map the aliases
        config[:attributes] = config.delete(:select) if config.key?(:select)

        # Create the struct that mocks a configuration result
        config = OpenStruct.new(config)
        table_name = config[:query]&.klass&.name&.underscore
        instance_variable_set(:@table_name, table_name)
      end

      @config = config
    end

    # Run a configuration block or get the static configuration
    def configure(base, instance)
      return @config unless @config.respond_to?(:call)

      recursive = self < AuxiliaryStatement::Recursive
      settings = Settings.new(base, instance, recursive)
      settings.instance_exec(settings, &@config)
      settings
    end

    # Get the arel version of the statement table
    def table
      @table ||= ::Arel::Table.new(table_name)
    end
  end

  delegate :config, :table, :table_name, :relation, :configure, :relation_query?,
    to: :class

  attr_reader :bound_attributes, :join_sources

  # Start a new auxiliary statement giving extra options
  def initialize(*, **options)
    args_key = Torque::PostgreSQL.config.auxiliary_statement.send_arguments_key

    @join = options.fetch(:join, {})
    @args = options.fetch(args_key, {})
    @where = options.fetch(:where, {})
    @select = options.fetch(:select, {})
    @join_type = options[:join_type]

    @bound_attributes = []
    @join_sources = []
  end

  # Build the statement on the given arel and return the WITH statement
  def build(base)
    @bound_attributes.clear
    @join_sources.clear

    # Prepare all the data for the statement
    prepare(base, configure(base, self))

    # Add the join condition to the list
    @join_sources << build_join(base)

    # Return the statement with its dependencies
    [@dependencies, ::Arel::Nodes::As.new(table, build_query(base))]
  end

  private

    # Setup the statement using the class configuration
    def prepare(base, settings)
      requires = Array.wrap(settings.requires).flatten.compact
      @dependencies = ensure_dependencies(requires, base).flatten.compact

      @join_type ||= settings.join_type || :inner
      @query = settings.query

      # Call a proc to get the real query
      if @query.respond_to?(:call)
        call_args = @query.try(:arity) === 0 ? [] : [OpenStruct.new(@args)]
        @query = @query.call(*call_args)
      end

      # Merge select attributes provided on the instance creation
      @select = settings.attributes.merge(@select) if settings.attributes.present?

      # Merge join settings
      if settings.join.present?
        @join = settings.join.merge(@join)
      elsif settings.through.present?
        @association = settings.through.to_s
      elsif relation_query?(@query)
        # Guess the association by matching the query's model against the
        # base model's reflections
        @association = base.reflections.find do |name, reflection|
          break name if @query.klass.eql?(reflection.klass)
        end
      end
    end

    # Build the string or arel query
    def build_query(base)
      # Expose columns and get the list of the ones for select
      columns = expose_columns(base, @query.try(:arel_table))

      # Prepare the query depending on its type
      if @query.is_a?(String)
        # String queries use format-style interpolation with quoted args
        args = @args.map{ |k, v| [k, base.connection.quote(v)] }.to_h
        ::Arel.sql("(#{@query})" % args)
      elsif relation_query?(@query)
        @query = @query.where(@where) if @where.present?
        @bound_attributes.concat(@query.send(:bound_attributes))
        @query.select(*columns).arel
      else
        raise ArgumentError, <<-MSG.squish
          Only String and ActiveRecord::Base objects are accepted as query objects,
          #{@query.class.name} given for #{self.class.name}.
        MSG
      end
    end

    # Build the join statement that will be sent to the main arel
    def build_join(base)
      conditions = table.create_and([])
      builder = base.predicate_builder
      foreign_table = base.arel_table

      # Check if it's necessary to load the join from an association
      if @association.present?
        association = base.reflections[@association]

        # Require source of a through reflection
        if association.through_reflection?
          # NOTE(review): the relation returned by +joins+ is discarded
          # here; confirm whether this call is still required
          base.joins(association.source_reflection_name)

          # Changes the base of the connection to the reflection table
          builder = association.klass.predicate_builder
          foreign_table = ::Arel::Table.new(association.plural_name)
        end

        @query.merge(association.join_scope(@query.arel_table, foreign_table, base))

        # Add the join constraints
        constraint = association.build_join_constraint(table, foreign_table)
        constraint = constraint.children if constraint.is_a?(::Arel::Nodes::And)
        conditions.children.concat(Array.wrap(constraint))
      end

      # Build all conditions for the join on statement
      @join.inject(conditions.children) do |arr, (left, right)|
        left = project(left, foreign_table)
        item = right.is_a?(Symbol) ? project(right).eq(left) : builder.build(left, right)
        arr.push(item)
      end

      # Raise an error when there's no join conditions
      raise ArgumentError, <<-MSG.squish if conditions.children.empty?
        You must provide the join columns when using '#{@query.class.name}'
        as a query object on #{self.class.name}.
      MSG

      # Build the join based on the join type
      arel_join.new(table, table.create_on(conditions))
    end

    # Get the class of the join on arel
    def arel_join
      case @join_type
      when :inner then ::Arel::Nodes::InnerJoin
      when :left  then ::Arel::Nodes::OuterJoin
      when :right then ::Arel::Nodes::RightOuterJoin
      when :full  then ::Arel::Nodes::FullOuterJoin
      else
        raise ArgumentError, <<-MSG.squish
          The '#{@join_type}' is not implemented as a join type.
        MSG
      end
    end

    # Mount the list of selected attributes
    def expose_columns(base, query_table = nil)
      # Add the columns necessary for the join
      list = @join_sources.each_with_object(@select) do |join, hash|
        join.right.expr.children.each do |item|
          hash[item.left.name] = nil if item.left.relation.eql?(table)
        end
      end

      # Add select columns to the query and get exposed columns
      list.filter_map do |left, right|
        base.select_extra_values += [table[right.to_s]] unless right.nil?
        next unless query_table

        col = project(left, query_table)
        right.nil? ? col : col.as(right.to_s)
      end
    end

    # Ensure that all the dependencies are loaded in the base relation
    def ensure_dependencies(list, base)
      with_options = list.extract_options!.to_a
      (list + with_options).map do |name, options|
        dependent_klass = base.model.auxiliary_statements_list[name]

        raise ArgumentError, <<-MSG.squish if dependent_klass.nil?
          The '#{name}' auxiliary statement dependency can't be found on
          #{self.class.name}.
        MSG

        # Skip dependencies that are already part of the relation
        next if base.auxiliary_statements_values.any? do |cte|
          cte.is_a?(dependent_klass)
        end

        options ||= {}
        AuxiliaryStatement.build(name, base, bound_attributes, join_sources, **options)
      end
    end

    # Project a column on a given table, or use the column table
    def project(column, arel_table = nil)
      if column.respond_to?(:as)
        return column
      elsif (as_string = TABLE_COLUMN_AS_STRING.match(column.to_s))
        column = as_string[2]
        arel_table = ::Arel::Table.new(as_string[1]) unless as_string[1].nil?
      end

      arel_table ||= table
      arel_table[column.to_s]
    end
end
end
end
================================================
FILE: lib/torque/postgresql/base.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Base
  extend ActiveSupport::Concern

  ##
  # :singleton-method: schema
  # :call-seq: schema
  #
  # The schema to which the table belongs to.

  included do
    mattr_accessor :belongs_to_many_required_by_default, instance_accessor: false
    class_attribute :schema, instance_writer: false
  end

  class_methods do
    # Expose the extended relation methods straight on the model class
    delegate :distinct_on, :with, :itself_only, :cast_records, :join_series,
      :buckets, to: :all

    # Make sure that table name is an instance of TableName class
    def reset_table_name
      return super unless PostgreSQL.config.schemas.enabled

      self.table_name = TableName.new(self, super)
    end

    # Whenever the base model is inherited, add a list of auxiliary
    # statements like the one that loads inherited records' relname
    def inherited(subclass)
      super

      subclass.class_attribute(:auxiliary_statements_list)
      subclass.auxiliary_statements_list = {}

      record_class = ActiveRecord::Relation._record_class_attribute

      # Define the dynamic attribute that returns the same information as
      # the one provided by the auxiliary statement
      subclass.dynamic_attribute(record_class) do
        klass = self.class
        # Without physical inheritance the table name is the answer
        next klass.table_name unless klass.physically_inheritances?

        # Resolve the actual table holding this record via its tableoid
        query = klass.unscoped.where(subclass.primary_key => id)
        query.pluck(klass.arel_table['tableoid'].pg_cast('regclass')).first
      end
    end

    # Specifies a one-to-many association. The following methods for
    # retrieval and query of collections of associated objects will be
    # added:
    #
    # +collection+ is a placeholder for the symbol passed as the +name+
    # argument, so belongs_to_many :tags would add among others
    # tags.empty?.
    #
    # [collection]
    #   Returns a Relation of all the associated objects.
    #   An empty Relation is returned if none are found.
    # [collection<<(object, ...)]
    #   Adds one or more objects to the collection by adding their ids to
    #   the array of ids on the parent object.
    #   Note that this operation instantly fires update SQL without waiting
    #   for the save or update call on the parent object, unless the parent
    #   object is a new record.
    #   This will also run validations and callbacks of associated
    #   object(s).
    # [collection.delete(object, ...)]
    #   Removes one or more objects from the collection by removing their
    #   ids from the list on the parent object.
    #   Objects will be in addition destroyed if they're associated with
    #   dependent: :destroy, and deleted if they're associated
    #   with dependent: :delete_all.
    # [collection.destroy(object, ...)]
    #   Removes one or more objects from the collection by running
    #   destroy on each record, regardless of any dependent option,
    #   ensuring callbacks are run. They will also be removed from the list
    #   on the parent object.
    # [collection=objects]
    #   Replaces the collections content by deleting and adding objects as
    #   appropriate.
    # [collection_singular_ids]
    #   Returns an array of the associated objects' ids
    # [collection_singular_ids=ids]
    #   Replace the collection with the objects identified by the primary
    #   keys in +ids+. This method loads the models and calls
    #   collection=. See above.
    # [collection.clear]
    #   Removes every object from the collection. This destroys the
    #   associated objects if they are associated with
    #   dependent: :destroy, deletes them directly from the
    #   database if dependent: :delete_all, otherwise just remove
    #   them from the list on the parent object.
    # [collection.empty?]
    #   Returns +true+ if there are no associated objects.
    # [collection.size]
    #   Returns the number of associated objects.
    # [collection.find(...)]
    #   Finds an associated object according to the same rules as
    #   ActiveRecord::FinderMethods#find.
    # [collection.exists?(...)]
    #   Checks whether an associated object with the given conditions exists.
    #   Uses the same rules as ActiveRecord::FinderMethods#exists?.
    # [collection.build(attributes = {}, ...)]
    #   Returns one or more new objects of the collection type that have
    #   been instantiated with +attributes+ and linked to this object by
    #   adding its +id+ to the list after saving.
    # [collection.create(attributes = {})]
    #   Returns a new object of the collection type that has been
    #   instantiated with +attributes+, linked to this object by adding its
    #   +id+ to the list after performing the save (if it passed the
    #   validation).
    # [collection.create!(attributes = {})]
    #   Does the same as collection.create, but raises
    #   ActiveRecord::RecordInvalid if the record is invalid.
    # [collection.reload]
    #   Returns a Relation of all of the associated objects, forcing a
    #   database read. An empty Relation is returned if none are found.
    #
    # === Example
    #
    # A Video class declares belongs_to_many :tags,
    # which will add:
    # * Video#tags (similar to Tag.where([id] && tag_ids))
    # * Video#tags<<
    # * Video#tags.delete
    # * Video#tags.destroy
    # * Video#tags=
    # * Video#tag_ids
    # * Video#tag_ids=
    # * Video#tags.clear
    # * Video#tags.empty?
    # * Video#tags.size
    # * Video#tags.find
    # * Video#tags.exists?(name: 'ACME')
    # * Video#tags.build
    # * Video#tags.create
    # * Video#tags.create!
    # * Video#tags.reload
    # The declaration can also include an +options+ hash to specialize the
    # behavior of the association.
    #
    # === Options
    # [:class_name]
    #   Specify the class name of the association. Use it only if that name
    #   can't be inferred from the association name. So belongs_to_many
    #   :tags will by default be linked to the +Tag+ class, but if the
    #   real class name is +SpecialTag+, you'll have to specify it with this
    #   option.
    # [:foreign_key]
    #   Specify the foreign key used for the association. By default this is
    #   guessed to be the name of this class in lower-case and "_ids"
    #   suffixed. So a Video class that makes a #belongs_to_many association
    #   with Tag will use "tag_ids" as the default :foreign_key.
    #
    #   It is a good idea to set the :inverse_of option as well.
    # [:primary_key]
    #   Specify the name of the column to use as the primary key for the
    #   association. By default this is +id+.
    # [:dependent]
    #   Controls what happens to the associated objects when their owner is
    #   destroyed. Note that these are implemented as callbacks, and Rails
    #   executes callbacks in order. Therefore, other similar callbacks may
    #   affect the :dependent behavior, and the :dependent
    #   behavior may affect other callbacks.
    # [:touch]
    #   If true, the associated objects will be touched (the updated_at/on
    #   attributes set to current time) when this record is either saved or
    #   destroyed. If you specify a symbol, that attribute will be updated
    #   with the current time in addition to the updated_at/on attribute.
    #   Please note that with touching no validation is performed and only
    #   the +after_touch+, +after_commit+ and +after_rollback+ callbacks are
    #   executed.
    # [:optional]
    #   When set to +true+, the association will not have its presence
    #   validated.
    # [:required]
    #   When set to +true+, the association will also have its presence
    #   validated. This will validate the association itself, not the id.
    #   You can use +:inverse_of+ to avoid an extra query during validation.
    #   NOTE: required is set to false by default and is
    #   deprecated. If you want to have association presence validated,
    #   use required: true.
    # [:default]
    #   Provide a callable (i.e. proc or lambda) to specify that the
    #   association should be initialized with a particular record before
    #   validation.
    # [:inverse_of]
    #   Specifies the name of the #has_many association on the associated
    #   object that is the inverse of this #belongs_to_many association.
    #   See ActiveRecord::Associations::ClassMethods's overview on
    #   Bi-directional associations for more detail.
    #
    # Option examples:
    #   belongs_to_many :tags, dependent: :nullify
    #   belongs_to_many :tags, required: true, touch: true
    #   belongs_to_many :tags, default: -> { Tag.default }
    def belongs_to_many(name, scope = nil, **options, &extension)
      klass = Associations::Builder::BelongsToMany
      reflection = klass.build(self, name, scope, options, &extension)
      ::ActiveRecord::Reflection.add_reflection(self, name, reflection)
    end

    protected

      # Allow optional select attributes to be loaded manually when they are
      # not present. This is associated with auxiliary statement, which
      # permits columns that can be loaded through CTEs, be loaded
      # individually for a single record
      #
      # For instance, if you have a statement that can load an user's last
      # comment content, by querying the comments using an auxiliary
      # statement.
      #   subclass.auxiliary_statement :last_comment do |cte|
      #     cte.query Comment.order(:user_id, id: :desc)
      #       .distinct_on(:user_id)
      #     cte.attributes col(:content) => :last_comment
      #     cte.join_type :left
      #   end
      #
      # In case you don't use 'with(:last_comment)', you can do the
      # following.
      #   dynamic_attribute(:last_comment) do
      #     comments.order(id: :desc).first.content
      #   end
      #
      # This means that any auxiliary statements can have their columns
      # granted even when they are not used
      def dynamic_attribute(name, &block)
        define_method(name) do
          # Prefer the value already loaded (e.g. by the CTE)
          return read_attribute(name) if has_attribute?(name)

          result = self.instance_exec(&block)
          type_klass = ActiveRecord::Type.respond_to?(:default_value) \
            ? ActiveRecord::Type.default_value \
            : self.class.connection.type_map.send(:default_value)

          # Cache the computed value as a regular attribute so further
          # reads go through +read_attribute+
          @attributes[name.to_s] = ActiveRecord::Relation::QueryAttribute.new(
            name.to_s, result, type_klass,
          )

          read_attribute(name)
        end
      end

      # Creates a new auxiliary statement (CTE) under the base class
      # attributes key:
      # Provides a map of attributes to be exposed to the main query.
      #
      # For instance, if the statement query has an 'id' column that you
      # want it to be accessed on the main query as 'item_id',
      # you can use:
      #   attributes id: :item_id, 'MAX(id)' => :max_id,
      #     col(:id).minimum => :min_id
      #
      # If its statement has more tables, and you want to expose those
      # fields, then:
      #   attributes 'table.name': :item_name
      #
      # join_type key:
      # Changes the type of the join and set the constraints
      #
      # The left side of the hash is the source table column, the right
      # side is the statement table column, now it's only accepting '='
      # constraints
      #   join id: :user_id
      #   join id: :'user.id'
      #   join 'post.id': :'user.last_post_id'
      #
      # It's possible to change the default type of join
      #   join :left, id: :user_id
      #
      # join key:
      # Changes the type of the join
      #
      # query key:
      # Save the query command to be performed
      #
      # requires key:
      # Indicates dependencies with another statements
      #
      # polymorphic key:
      # Indicates a polymorphic relationship, which will affect the way the
      # auto join works, by giving a polymorphic connection
      def auxiliary_statement(table, &block)
        klass = AuxiliaryStatement.lookup(table, self)
        auxiliary_statements_list[table.to_sym] = klass
        klass.configurator(block)
      end
      alias cte auxiliary_statement

      # Creates a new recursive auxiliary statement (CTE) under the base
      # Very similar to the regular auxiliary statement, but with two-part
      # query where one is executed first and the second recursively
      def recursive_auxiliary_statement(table, &block)
        klass = AuxiliaryStatement::Recursive.lookup(table, self)
        auxiliary_statements_list[table.to_sym] = klass
        klass.configurator(block)
      end
      alias recursive_cte recursive_auxiliary_statement
  end
end
::ActiveRecord::Base.include(Base)
end
end
================================================
FILE: lib/torque/postgresql/collector.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Collector
  # This class helps to collect data in different ways. Used to configure
  # auxiliary statements
  #
  # Each given attribute name becomes a combined reader/writer method on
  # the generated class: no arguments reads the value, one argument stores
  # it, and multiple arguments store them as an array. A `name=` alias is
  # also provided for each attribute
  def self.new(*args)
    attributes = args.flatten.compact

    Class.new do
      attributes.each do |attribute|
        ivar = "@#{attribute}"

        # Reader when called without arguments, writer otherwise
        define_method(attribute) do |*values|
          case values.size
          when 0 then instance_variable_get(ivar)
          when 1 then instance_variable_set(ivar, values.first)
          else instance_variable_set(ivar, values)
          end
        end

        alias_method "#{attribute}=", attribute
      end
    end
  end
end
end
end
================================================
FILE: lib/torque/postgresql/config.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
  include ActiveSupport::Configurable

  # Use the same logger as the Active Record one
  def self.logger
    ActiveRecord::Base.logger
  end

  # Allow nested configurations
  # :TODO: Rely on +inheritable_copy+ to make nested configurations
  config.define_singleton_method(:nested) do |name, &block|
    klass = Class.new(ActiveSupport::Configurable::Configuration).new
    block.call(klass) if block
    send("#{name}=", klass)
  end

  # Set if any information that requires querying and searching or collecting
  # information should be eager loaded. This automatically changes when rails
  # same configuration is set to true
  config.eager_load = false

  # Add support for joining any query/association with a generated series
  config.join_series = true

  # Add support for querying and calculating histogram buckets
  config.buckets = true

  # Set a list of irregular model name when associated with table names
  config.irregular_models = {}
  def config.irregular_models=(hash)
    PostgreSQL.config[:irregular_models] = hash.map do |(table, model)|
      [table.to_s, model.to_s]
    end.to_h
  end

  # Configure associations features
  config.nested(:associations) do |assoc|
    # Define if +belongs_to_many+ associations are marked as required by
    # default. False means that no validation will be performed
    assoc.belongs_to_many_required_by_default = false

    # Although +belongs_to_many+ does not need a custom handler when joining
    # the last chain scope, this can allow devs to pick which way they prefer:
    # Rails default, or ANY with a single bind to improve prepared statements
    # assoc.optimize_for_binds = false TODO: Add support
  end

  # Configure multiple schemas
  config.nested(:schemas) do |schemas|
    # Enables schemas handler by this gem, not Rails's own implementation
    schemas.enabled = true

    # Defines a list of LIKE-based schemas to not consider for a multiple
    # schema database
    schemas.blacklist = %w[information_schema pg_%]

    # Defines a list of LIKE-based schemas to consider for a multiple schema
    # database
    schemas.whitelist = %w[public]
  end

  # Configure auxiliary statement features
  config.nested(:auxiliary_statement) do |cte|
    # Enables auxiliary statements handler by this gem, not Rails's own
    # implementation
    cte.enabled = true

    # Define the key that is used on auxiliary statements to send extra
    # arguments to format string or send on a proc
    cte.send_arguments_key = :args

    # Estipulate a class name (which may contain namespace) that exposes the
    # auxiliary statement in order to perform detached CTEs
    cte.exposed_class = 'TorqueCTE'

    # Estipulate a class name (which may contain namespace) that exposes the
    # recursive auxiliary statement in order to perform detached CTEs
    cte.exposed_recursive_class = 'TorqueRecursiveCTE'
  end

  # Configure ENUM features
  config.nested(:enum) do |enum|
    # Enables enum handler by this gem, not Rails's own implementation
    enum.enabled = true

    # The name of the method to be used on any ActiveRecord::Base to
    # initialize model-based enum features
    enum.base_method = :torque_enum

    # The name of the method to be used on any ActiveRecord::Base to
    # initialize model-based enum set features
    enum.set_method = :torque_enum_set

    # Indicates if bang methods like 'disabled!' should update the record on
    # database or not
    enum.save_on_bang = true

    # Indicates if it should raise errors when a generated method would
    # conflict with an existing one
    enum.raise_conflicting = false

    # Specify the namespace of each enum type of value
    enum.namespace = nil

    # Specify the scopes for I18n translations
    enum.i18n_scopes = [
      'activerecord.attributes.%{model}.%{attr}.%{value}',
      'activerecord.attributes.%{attr}.%{value}',
      'activerecord.enums.%{type}.%{value}',
      'enum.%{type}.%{value}',
      'enum.%{value}'
    ]

    # Specify the scopes for I18n translations but with type only
    enum.i18n_type_scopes = Enumerator.new do |yielder|
      enum.i18n_scopes.each do |key|
        next if key.include?('%{model}') || key.include?('%{attr}')
        yielder << key
      end
    end
  end

  # Configure geometry data types
  config.nested(:geometry) do |geometry|
    # Enables geometry handler by this gem, not Rails's own implementation
    geometry.enabled = true

    # Define the class that will be handling Point data types after decoding
    # it. Any class provided here must respond to 'x', and 'y'
    geometry.point_class = ActiveRecord::Point

    # Define the class that will be handling Box data types after decoding it.
    # Any class provided here must respond to 'x1', 'y1', 'x2', and 'y2'
    geometry.box_class = nil

    # Define the class that will be handling Circle data types after decoding
    # it. Any class provided here must respond to 'x', 'y', and 'r'
    geometry.circle_class = nil

    # Define the class that will be handling Line data types after decoding
    # it. Any class provided here must respond to 'a', 'b', and 'c'
    geometry.line_class = nil

    # Define the class that will be handling Segment data types after decoding
    # it. Any class provided here must respond to 'x1', 'y1', 'x2', and 'y2'
    geometry.segment_class = nil
  end

  # Configure inheritance features
  config.nested(:inheritance) do |inheritance|
    # Define the lookup of models from their given name to be inverted, which
    # means that they are going to be form the last namespaced one to the
    # most namespaced one
    inheritance.inverse_lookup = true

    # Determines the name of the column used to collect the table of each
    # record. When the table has inheritance tables, this column will return
    # the name of the table that actually holds the record
    inheritance.record_class_column_name = :_record_class

    # Determines the name of the column used when identifying that the loaded
    # records should be casted to its correctly model. This will be TRUE for
    # the records mentioned on `cast_records`
    inheritance.auto_cast_column_name = :_auto_cast
  end

  # Configure period features
  config.nested(:period) do |period|
    # Enables period handler by this gem
    period.enabled = true

    # The name of the method to be used on any ActiveRecord::Base to
    # initialize model-based period features
    period.base_method = :period_for

    # The default name for a threshold attribute, which will automatically
    # enable threshold features
    period.auto_threshold = :threshold

    # Define the list of methods that will be created by default while setting
    # up a new period field
    period.method_names = {
      current_on: '%s_on',                                   # 00
      current: 'current_%s',                                 # 01
      not_current: 'not_current_%s',                         # 02
      containing: '%s_containing',                           # 03
      not_containing: '%s_not_containing',                   # 04
      overlapping: '%s_overlapping',                         # 05
      not_overlapping: '%s_not_overlapping',                 # 06
      starting_after: '%s_starting_after',                   # 07
      starting_before: '%s_starting_before',                 # 08
      finishing_after: '%s_finishing_after',                 # 09
      finishing_before: '%s_finishing_before',               # 10
      real_containing: '%s_real_containing',                 # 11
      real_overlapping: '%s_real_overlapping',               # 12
      real_starting_after: '%s_real_starting_after',         # 13
      real_starting_before: '%s_real_starting_before',       # 14
      real_finishing_after: '%s_real_finishing_after',       # 15
      real_finishing_before: '%s_real_finishing_before',     # 16
      containing_date: '%s_containing_date',                 # 17
      not_containing_date: '%s_not_containing_date',         # 18
      overlapping_date: '%s_overlapping_date',               # 19
      not_overlapping_date: '%s_not_overlapping_date',       # 20
      real_containing_date: '%s_real_containing_date',       # 21
      real_overlapping_date: '%s_real_overlapping_date',     # 22
      current?: 'current_%s?',                               # 23
      current_on?: 'current_%s_on?',                         # 24
      start: '%s_start',                                     # 25
      finish: '%s_finish',                                   # 26
      real: 'real_%s',                                       # 27
      real_start: '%s_real_start',                           # 28
      real_finish: '%s_real_finish',                         # 29
    }

    # If the period is marked as direct access, without the field name,
    # then these method names will replace the default ones
    period.direct_method_names = {
      current_on: 'happening_in',
      containing: 'during',
      not_containing: 'not_during',
      real_containing: 'real_during',
      containing_date: 'during_date',
      not_containing_date: 'not_during_date',
      current_on?: 'happening_in?',
      start: 'start_at',
      finish: 'finish_at',
      real: 'real_time',
      real_start: 'real_start_at',
      real_finish: 'real_finish_at',
    }
  end

  # Configure interval features
  config.nested(:interval) do |interval|
    # Enables interval handler by this gem, not Rails's own implementation
    interval.enabled = true
  end

  # Configure arel additional features
  config.nested(:arel) do |arel|
    # When provided, the initializer will expose the Arel function helper on
    # the given module (set on the nested config, consistent with the other
    # nested blocks; it was previously assigned to the top-level config)
    arel.expose_function_helper_on = nil

    # List of Arel INFIX operators that will be made available for using as
    # methods on Arel::Nodes::Node and Arel::Attribute
    arel.infix_operators = {
      'contained_by' => '<@',
      'has_key' => '?',
      'has_all_keys' => '?&',
      'has_any_keys' => '?|',
      'strictly_left' => '<<',
      'strictly_right' => '>>',
      'doesnt_right_extend' => '&<',
      'doesnt_left_extend' => '&>',
      'adjacent_to' => '-|-',
    }
  end

  # Configure full text search features
  config.nested(:full_text_search) do |fts|
    # Enables full text search handler by this gem
    fts.enabled = true

    # The name of the method to be used on any ActiveRecord::Base to
    # initialize model-based full text search features
    fts.base_method = :torque_search_for

    # Defines the default language when generating search vector columns
    fts.default_language = 'english'

    # Defines the default mode to be used when generating full text search
    # queries. It can be one of the following:
    # - :default (to_tsquery)
    # - :phrase (phraseto_tsquery)
    # - :plain (plainto_tsquery)
    # - :web (websearch_to_tsquery)
    fts.default_mode = :phrase

    # Defines the default index type to be used when creating search vector.
    # It still requires that the column requests an index
    fts.default_index_type = :gin
  end

  # Configure predicate builder additional features
  config.nested(:predicate_builder) do |builder|
    # List which handlers are enabled by default
    builder.enabled = %i[regexp arel_attribute enumerator_lazy]

    # When active, values provided to array attributes will be handled more
    # friendly. It will use the +ANY+ operator on a equality check and
    # overlaps when the given value is an array
    builder.handle_array_attributes = false

    # Make sure that the predicate builder will not spend more than 20ms
    # trying to produce the underlying array
    builder.lazy_timeout = 0.02

    # Since lazy array is uncommon, it is better to limit the number of
    # entries we try to pull so we don't cause a timeout or a long wait
    # iteration
    builder.lazy_limit = 2_000
  end

  # Configure versioned commands features
  config.nested(:versioned_commands) do |vs|
    # This is a feature that developers must explicitly opt-in. It is designed
    # in a way that prevents a large impact on Rails' original migrations
    # behavior. But, it is still a feature that everyone may not need, and
    # some may complain about the additional schema table, which also uses
    # inheritance
    vs.enabled = false

    # Define the list of commands that are going to be versioned by this
    # method
    vs.types = %i[function type view]

    # The name of the table that will inherit from +schema_migrations+ and
    # store the list of versioned commands that have been executed
    vs.table_name = 'schema_versioned_commands'
  end
end
end
================================================
FILE: lib/torque/postgresql/function.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
# Simplified module for creating arel functions. This is used internally
# but can also be made available to other devs on their own projects
# Simplified module for creating arel functions. This is used internally
# but can also be made available to other devs on their own projects
module Function
  class << self
    # A facilitator to create a bind param that is fully compatible with
    # Arel and ActiveRecord
    def bind(*args)
      query_attribute = ::ActiveRecord::Relation::QueryAttribute.new(*args)
      ::Arel::Nodes::BindParam.new(query_attribute)
    end

    # Just a shortcut to create a bind param for a model attribute and a
    # value for it
    def bind_for(model, attribute, value)
      bind(attribute, value, model.attribute_types[attribute])
    end

    # Another shortcut, when we already have the arel attribute at hand
    def bind_with(arel_attribute, value)
      bind(arel_attribute.name, value, arel_attribute.type_caster)
    end

    # A facilitator to create a bind param with a specific type. The type
    # can be inferred from the value, given as a symbol (looked up on
    # ActiveModel) or provided as a type instance. When +cast+ is given,
    # the bind is wrapped into a PostgreSQL cast
    def bind_type(value, type = nil, name: 'value', cast: nil)
      type ||= ruby_type_to_model_type(value)
      type = ActiveModel::Type.lookup(type) if type.is_a?(Symbol)

      node = bind(name, value, type)
      return node unless cast
      node.pg_cast(cast)
    end

    # A facilitator to create an infix operation
    def infix(op, left, right)
      ::Arel::Nodes::InfixOperation.new(op, left, right)
    end

    # A facilitator to use several Infix operators to concatenate all the
    # provided arguments. Arguments won't be sanitized, as other methods
    # under this module
    def concat(*args)
      # +inject+ already returns a sole element untouched, so no explicit
      # single-argument guard is needed
      args.inject { |memo, part| infix(:"||", memo, part) }
    end

    # A simple helper to trick Rails into producing the right SQL for
    # grouping operations
    def group_by(arel, name)
      Arel::Nodes::Ref.new(name.to_s, arel)
    end

    # As of now, this indicates that it supports any direct calls, since
    # the idea is to simply map to an Arel function with the same name,
    # without checking if it actually exists
    def respond_to_missing?(*)
      true
    end

    # Catch any undefined method call and turn it into an uppercased Arel
    # named function, forwarding all arguments without any sanitization
    def method_missing(name, *args, &block)
      ::Arel::Nodes::NamedFunction.new(name.to_s.upcase, args)
    end

    private

    # Map a plain Ruby value to the matching ActiveModel type symbol (or
    # an interval type instance for durations). Order matters here:
    # +DateTime+ must be tested before +Date+ since it is a subclass
    def ruby_type_to_model_type(value)
      case value
      when Integer then :integer
      when Float then :float
      when String then :string
      when Time, ActiveSupport::TimeWithZone then :time
      when TrueClass, FalseClass then :boolean
      when DateTime then :datetime
      when Date then :date
      when BigDecimal then :decimal
      when ActiveSupport::Duration
        Adapter::OID::Interval.new
      else
        raise ArgumentError, "Cannot infer type from value: #{value.inspect}."
      end
    end
  end
end

FN = Function
end
end
================================================
FILE: lib/torque/postgresql/geometry_builder.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
class GeometryBuilder < ActiveModel::Type::Value

  # Characters stripped from textual geometry representations before parsing
  DESTRUCTOR = /[<>{}()]/.freeze

  # Render numbers without a trailing ".0" (e.g. 1.0 => "1")
  NUMBER_SERIALIZER = ->(num) { num.to_s.gsub(/\.0$/, '') }

  # The geometry type symbol, derived from the class name and cached in the
  # TYPE constant on first access
  def type
    return self.class.const_get('TYPE') if self.class.const_defined?('TYPE')
    self.class.const_set('TYPE', self.class.name.demodulize.underscore.to_sym)
  end

  # The list of value components (e.g. x, y) defined by the subclass
  def pieces
    self.class.const_get('PIECES')
  end

  # The format string used to serialize values into PG's notation
  def formation
    self.class.const_get('FORMATION')
  end

  # Cast a user-provided value (string, hash or array) into the configured
  # geometry class; other values pass through unchanged
  def cast(value)
    case value
    when ::String
      return if value.blank?
      # Use the non-destructive +gsub+: the previous +gsub!+ mutated the
      # caller's string and raised FrozenError on frozen input
      build_klass(*value.gsub(DESTRUCTOR, '').split(','))
    when ::Hash
      build_klass(*value.symbolize_keys.slice(*pieces).values)
    when ::Array
      build_klass(*(value.flatten))
    else
      value
    end
  end

  # Serialize a value into the PostgreSQL textual representation, raising
  # when there are not enough components
  def serialize(value)
    parts =
      case value
      when config_class
        pieces.map { |piece| value.public_send(piece) }
      when ::Hash
        value.symbolize_keys.slice(*pieces).values
      when ::Array
        value.flatten
      end

    parts = parts&.compact&.flatten
    return if parts.blank?

    raise 'Invalid format' if parts.size < pieces.size
    format(formation, *parts.first(pieces.size).map(&number_serializer))
  end

  # Parse the database textual representation back into the geometry class
  def deserialize(value)
    build_klass(*value.gsub(DESTRUCTOR, '').split(',')) unless value.nil?
  end

  # Schema dumps display the raw list of components
  def type_cast_for_schema(value)
    if config_class === value
      pieces.map { |piece| value.public_send(piece) }
    else
      super
    end
  end

  # Compare the raw database value with the serialized new value
  def changed_in_place?(raw_old_value, new_value)
    raw_old_value != serialize(new_value)
  end

  protected

  def number_serializer
    self.class.const_get('NUMBER_SERIALIZER')
  end

  # The user-configurable Ruby class that represents this geometry type
  def config_class
    Torque::PostgreSQL.config.geometry.public_send("#{type}_class")
  end

  # Instantiate the configured class from the given components (as floats)
  def build_klass(*args)
    return nil if args.empty?
    check_invalid_format!(args)

    # +args+ is always an Array here, so call +first+ directly; the
    # previous +try(:first, ...)&.map+ guarded against an impossible nil
    config_class.new(*args.first(pieces.size).map(&:to_f))
  end

  def check_invalid_format!(args)
    raise 'Invalid format' if args.size < pieces.size
  end
end
end
end
================================================
FILE: lib/torque/postgresql/i18n.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module I18n
# Adds extra support to localize durations.
# This is a temporary solution, since 3600.seconds does not translate into
# 1 hour
def localize(locale, object, format = :default, options = {})
return super unless object.is_a?(ActiveSupport::Duration)
# ActiveSupport::Duration#inspect renders a human-readable breakdown
# (e.g. "1 hour"); every other object keeps the regular I18n behavior
object.inspect
end
end
# Prepend so durations are intercepted before the backend's own localize
::I18n::Backend::Base.prepend I18n
end
end
================================================
FILE: lib/torque/postgresql/inheritance.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
InheritanceError = Class.new(ArgumentError)
module Inheritance
extend ActiveSupport::Concern
# Cast the given object to its correct class
def cast_record
  embedded_type = send(self.class._record_class_attribute)
  return self if self.class.table_name == embedded_type

  klass = self.class.casted_dependents[embedded_type]
  self.class.raise_unable_to_cast(embedded_type) if klass.nil?

  # The record needs to be re-queried to have its attributes loaded
  # :TODO: Improve this by only loading the necessary extra columns
  klass.find(id)
end
class_methods do
delegate :_auto_cast_attribute, :_record_class_attribute, to: ActiveRecord::Relation
# Get a full list of all attributes from a model and all its dependents
def inheritance_merged_attributes
  @inheritance_merged_attributes ||= begin
    from_children = casted_dependents.values.flat_map(&:attribute_names)
    # Array#| keeps the original order while deduplicating, matching the
    # previous Set-based merge
    (attribute_names | from_children).freeze
  end
end
# Get the list of attributes that can be merged while querying because
# they all have the same type
def inheritance_mergeable_attributes
@inheritance_mergeable_attributes ||= begin
# Attributes that exist only on dependent models
base = inheritance_merged_attributes - attribute_names
types = base.zip(base.size.times.map { [] }).to_h
# Collect the declared type of every child-only attribute across all
# dependent classes
casted_dependents.values.each do |klass|
klass.attribute_types.each do |column, type|
types[column]&.push(type)
end
end
# Keep only attributes whose type is identical in every dependent:
# +shift+ removes the first collected type and the rest are compared
# against it ([type, first] pairs, splatted into ==)
result = types.filter_map do |attribute, types|
attribute if types.each_with_object(types.shift).all?(&:==)
end
(attribute_names + result).freeze
end
end
# Check if the model's table depends on any inheritance
def physically_inherited?
return @physically_inherited if defined?(@physically_inherited)
@physically_inherited = connection.schema_cache.dependencies(
defined?(@table_name) ? @table_name : decorated_table_name,
).present?
rescue ActiveRecord::ConnectionNotEstablished
# NOTE(review): the failure result is not memoized, so the check is
# retried once a connection is available — presumably intentional
false
end
# Get the list of all tables directly or indirectly dependent of the
# current one (empty when the schema cache knows of none)
def inheritance_dependents
connection.schema_cache.associations(table_name) || []
end
# Check whether the model's table has directly or indirectly dependents
def physically_inheritances?
  !inheritance_dependents.empty?
end
# Get the list of all ActiveRecord classes directly or indirectly
# associated by inheritance, keyed by table name
def casted_dependents
  @casted_dependents ||= inheritance_dependents.to_h do |table_name|
    [table_name, connection.schema_cache.lookup_model(table_name)]
  end
end
# Manually set the model name associated with tables name in order to
# facilitate the identification of inherited records
def reset_table_name
table = super
adapter = ActiveRecord::ConnectionAdapters::PostgreSQLAdapter
if Torque::PostgreSQL.config.eager_load && connection.is_a?(adapter)
# Register this model for its table on the schema cache so records
# loaded from dependent tables can be cast back to the right class
connection.schema_cache.add_model_name(table, self)
end
table
end
# Get the final decorated table, regardless of any special condition
def decorated_table_name
parent_class = try(:module_parent) || try(:parent)
if parent_class < Base && !parent_class.abstract_class?
# Prefix the table with the parent's (optionally singularized) table
contained = parent_class.table_name
contained = contained.singularize if parent_class.pluralize_table_names
contained += "_"
end
# +contained+ is nil (interpolates as '') when no parent prefix applies
"#{full_table_name_prefix}#{contained}#{undecorated_table_name(name)}#{full_table_name_suffix}"
end
# For all main purposes, physical inherited classes should have
# base_class as their own
def base_class
  return self if physically_inherited?
  super
end
# Primary key is one exception when getting information about the class,
# it must return the superclass PK
def primary_key
  return superclass.primary_key if physically_inherited?
  super
end
# Add an additional check to return the name of the table even when the
# class is inherited, but only if it is a physical inheritance
def compute_table_name
  return decorated_table_name if physically_inherited?
  super
end
# Raises an error message saying that the given record class was not
# able to be casted since the model was not identified
def raise_unable_to_cast(record_class_value)
  # The suggested fix must reference the real accessor, which is
  # +Torque::PostgreSQL.config+ (there is no +conf+ shortcut)
  raise InheritanceError.new(<<~MSG.squish)
    A record was not able to be casted to type '#{record_class_value}'.
    If this table name doesn't represent a guessable model,
    please use 'Torque::PostgreSQL.config.irregular_models =
    { '#{record_class_value}' => 'ModelName' }'.
  MSG
end
private
# If the class is physically inherited, the klass needs to be properly
# changed before moving forward
def instantiate_instance_of(klass, attributes, types = {}, &block)
return super unless klass.physically_inheritances?
real_class = torque_discriminate_class_for_record(klass, attributes)
return super if real_class.nil?
# Strip attributes/types that don't belong to the discovered class
attributes, types = sanitize_attributes(real_class, attributes, types)
super(real_class, attributes, types, &block)
end
# Unwrap the attributes and column types from the given class when
# there are unmergeable attributes
def sanitize_attributes(real_class, attributes, types)
# Attributes to drop: those the real class doesn't have at all, plus
# those it has but whose types conflict across dependents
skip = (inheritance_merged_attributes - real_class.attribute_names).to_set
skip.merge(real_class.attribute_names - inheritance_mergeable_attributes)
return [attributes, types] if skip.empty?
dropped = 0
new_types = {}
# NOTE(review): relies on IndexedRow internals (@row/@column_indexes);
# columns may arrive prefixed as "table__attribute" — confirm upstream
row = attributes.instance_variable_get(:@row).dup
indexes = attributes.instance_variable_get(:@column_indexes).dup
indexes = indexes.each_with_object({}) do |(column, index), new_indexes|
attribute, prefix = column.split('__', 2).reverse
current_index = index - dropped
if prefix != table_name && skip.include?(attribute)
# Remove the value and shift all following indexes down by one
row.delete_at(current_index)
dropped += 1
else
new_types.merge!(types.slice(attribute))
# NOTE(review): types are stored under both the attribute name and
# the positional index — verify both lookups are needed by callers
new_types[current_index] = types[index]
new_indexes[attribute] = current_index
end
end
[ActiveRecord::Result::IndexedRow.new(indexes, row), new_types]
end
# Get the real class when handling physical inheritances and casting
# the record when existing properly is present
def torque_discriminate_class_for_record(klass, record)
# Auto cast can be explicitly disabled per query
return if record[_auto_cast_attribute.to_s] == false
embedded_type = record[_record_class_attribute.to_s]
# Nothing to discriminate when the row comes from this very table
return if embedded_type.blank? || embedded_type == table_name
casted_dependents[embedded_type] || raise_unable_to_cast(embedded_type)
end
end
end
ActiveRecord::Base.include Inheritance
end
end
================================================
FILE: lib/torque/postgresql/insert_all.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
# Add support for a +where+ condition on ActiveRecord's insert_all /
# upsert_all machinery, used to produce filtered (partial) statements
module InsertAll
# The optional condition attached to the insert statement
attr_reader :where
# Capture the extra +where:+ option before delegating the remaining
# arguments to ActiveRecord
def initialize(*args, where: nil, **xargs)
super(*args, **xargs)
@where = where
end
end
# Expose the captured +where+ condition to the SQL builder
module InsertAll::Builder
delegate :where, to: :insert_all
# Check whether a +where+ condition was provided
def where_condition?
!where.nil?
end
end
ActiveRecord::InsertAll.prepend InsertAll
ActiveRecord::InsertAll::Builder.include InsertAll::Builder
end
end
================================================
FILE: lib/torque/postgresql/migration/command_recorder.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Migration
module CommandRecorder
# Records the rename operation for types
def rename_type(*args, &block)
record(:rename_type, args, &block)
end
# Inverts the type rename operation by swapping the old and new names
def invert_rename_type(args)
[:rename_type, args.reverse]
end
# Records the creation of a schema
def create_schema(*args, &block)
record(:create_schema, args, &block)
end
# Inverts the creation of a schema by dropping it (only the schema name
# is carried over; any creation options are discarded)
def invert_create_schema(args)
[:drop_schema, [args.first]]
end
end
ActiveRecord::Migration::CommandRecorder.include CommandRecorder
end
end
end
================================================
FILE: lib/torque/postgresql/migration.rb
================================================
require_relative 'migration/command_recorder'
================================================
FILE: lib/torque/postgresql/predicate_builder/arel_attribute_handler.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module PredicateBuilder
class ArelAttributeHandler
  # Convenience entry point: build an instance and delegate
  def self.call(*args)
    new.call(*args)
  end

  # The predicate builder argument is accepted for interface parity with
  # the other handlers, but is not needed here
  def initialize(*)
  end

  # Produce the proper comparison node for the attribute/value pair,
  # using PostgreSQL's ANY/overlap semantics when either side is an array
  def call(attribute, value)
    if array_typed?(attribute) && array_typed?(value)
      attribute.overlaps(value)
    elsif array_typed?(attribute)
      value.eq(FN.any(attribute))
    elsif array_typed?(value)
      attribute.eq(FN.any(value))
    else
      attribute.eq(value)
    end
  end

  private

  # An entity is array-typed when it can type cast and its caster is the
  # PostgreSQL array OID type
  def array_typed?(attribute)
    attribute.able_to_type_cast? && attribute.type_caster.is_a?(ARRAY_OID)
  end
end
end
end
end
================================================
FILE: lib/torque/postgresql/predicate_builder/array_handler.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module PredicateBuilder
module ArrayHandler
  # Intercept the stock handlers: when the attribute is a PG array column
  # and the feature flag is enabled, use the array-aware builders below
  def call(attribute, value)
    if array_attribute?(attribute) &&
        PostgreSQL.config.predicate_builder.handle_array_attributes
      call_for_array(attribute, value)
    else
      super
    end
  end

  # Dispatch by the shape of the value: scalar, filled array or empty array
  def call_for_array(attribute, value)
    return call_with_value(attribute, value) unless value.is_a?(::Array)
    value.any? ? call_with_array(attribute, value) : call_with_empty(attribute)
  end

  private

  # Scalar value: equality against ANY element of the stored array
  def call_with_value(attribute, value)
    FN.infix(:"=", FN.bind_with(attribute, value), FN.any(attribute))
  end

  # Filled array: use the overlap operator between the two arrays
  def call_with_array(attribute, value)
    attribute.overlaps(FN.bind_with(attribute, value))
  end

  # Empty array: match only rows whose stored array is empty
  def call_with_empty(attribute)
    FN.cardinality(attribute).eq(0)
  end

  # Check whether the attribute is backed by the PG array OID type
  def array_attribute?(attribute)
    attribute.type_caster.is_a?(ARRAY_OID)
  end
end

::ActiveRecord::PredicateBuilder::ArrayHandler.prepend(ArrayHandler)
::ActiveRecord::PredicateBuilder::BasicObjectHandler.prepend(ArrayHandler)
end
end
end
================================================
FILE: lib/torque/postgresql/predicate_builder/enumerator_lazy_handler.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module PredicateBuilder
class EnumeratorLazyHandler < ::ActiveRecord::PredicateBuilder::ArrayHandler
# Gem-specific error class so callers can distinguish this handler's
# timeout from a generic ::Timeout::Error (note the constant shadows
# ::Timeout inside this class, hence the fully-qualified references)
Timeout = Class.new(::Timeout::Error)
# Materialize the lazy enumerator — fully via +force+ when no limit is
# configured, otherwise bounded via +first(limit)+ — and delegate to the
# regular array handler, guarded by the configured timeout
def call(attribute, value)
with_timeout do
super(attribute, limit.nil? ? value.force : value.first(limit))
end
end
private
# Run the block under ::Timeout.timeout when a timeout is configured,
# re-raising the gem's own Timeout class with a descriptive message
def with_timeout
return yield if timeout.nil?
begin
::Timeout.timeout(timeout) { yield }
rescue ::Timeout::Error
raise Timeout, "Lazy predicate builder timed out after #{timeout} seconds"
end
end
# Configured maximum time (in seconds) to spend producing the array
def timeout
PostgreSQL.config.predicate_builder.lazy_timeout
end
# Configured maximum number of entries pulled from the lazy enumerator
def limit
PostgreSQL.config.predicate_builder.lazy_limit
end
end
end
end
end
================================================
FILE: lib/torque/postgresql/predicate_builder/regexp_handler.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module PredicateBuilder
class RegexpHandler
  # Keep a reference to the owning predicate builder, matching the
  # interface of ActiveRecord's other handlers
  def initialize(predicate_builder)
    @predicate_builder = predicate_builder
  end

  # Translate a Ruby Regexp into a PostgreSQL regex match, picking the
  # case-insensitive operator when the regexp carries the +i+ flag
  def call(attribute, value)
    op = value.casefold? ? :"~*" : :"~"
    FN.infix(op, attribute, FN.bind_with(attribute, value.source))
  end

  private

  attr_reader :predicate_builder
end
end
end
end
================================================
FILE: lib/torque/postgresql/predicate_builder.rb
================================================
# frozen_string_literal: true
require_relative 'predicate_builder/array_handler'
require_relative 'predicate_builder/regexp_handler'
require_relative 'predicate_builder/arel_attribute_handler'
require_relative 'predicate_builder/enumerator_lazy_handler'
module Torque
module PostgreSQL
module PredicateBuilder
# The PG array type class, used by the handlers to detect array columns
ARRAY_OID = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Array
# Hook ActiveRecord's predicate builder setup to register the extra
# handlers enabled through the gem configuration
def initialize(*)
super
# +inquiry+ lets the enabled list be checked by name, accepting both
# symbol and string entries
handlers = Array.wrap(PostgreSQL.config.predicate_builder.enabled).inquiry
# Regexp values translate into PG's ~ / ~* operators
if handlers.regexp?
register_handler(Regexp, RegexpHandler.new(self))
end
# Lazy enumerators are materialized (bounded by limit/timeout config)
if handlers.enumerator_lazy?
register_handler(Enumerator::Lazy, EnumeratorLazyHandler.new(self))
end
# Arel attributes on the value side get array-aware comparisons
if handlers.arel_attribute?
register_handler(::Arel::Attributes::Attribute, ArelAttributeHandler.new(self))
end
end
end
::ActiveRecord::PredicateBuilder.prepend(PredicateBuilder)
end
end
================================================
FILE: lib/torque/postgresql/railtie.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
# = Torque PostgreSQL Railtie
class Railtie < Rails::Railtie # :nodoc:

  # Get information from the running rails app and wire every enabled
  # feature once both the PG adapter and ActiveRecord are loaded
  initializer 'torque-postgresql' do |app|
    ActiveSupport.on_load(:active_record_postgresqladapter) do
      ActiveSupport.on_load(:active_record) do
        torque_config = Torque::PostgreSQL.config
        torque_config.eager_load = app.config.eager_load

        # TODO: Only load files that have their features enabled, like CTE
        ar_type = ActiveRecord::Type

        # Setup belongs_to_many association
        ActiveRecord::Base.belongs_to_many_required_by_default =
          torque_config.associations.belongs_to_many_required_by_default

        ## General features
        if torque_config.join_series
          require_relative 'relation/join_series'
          Relation.include(Relation::JoinSeries)
        end

        if torque_config.buckets
          require_relative 'relation/buckets'
          Relation.include(Relation::Buckets)
        end

        ## Schemas Enabled Setup
        if (config = torque_config.schemas).enabled
          require_relative 'adapter/schema_overrides'
        end

        ## CTE Enabled Setup
        if (config = torque_config.auxiliary_statement).enabled
          require_relative 'auxiliary_statement'
          require_relative 'relation/auxiliary_statement'
          Relation.include(Relation::AuxiliaryStatement)

          # Define the exposed constant for both types of auxiliary statements
          if config.exposed_class.present?
            *ns, name = config.exposed_class.split('::')
            base = ns.present? ? ::Object.const_get(ns.join('::')) : ::Object
            base.const_set(name, AuxiliaryStatement)

            *ns, name = config.exposed_recursive_class.split('::')
            base = ns.present? ? ::Object.const_get(ns.join('::')) : ::Object
            base.const_set(name, AuxiliaryStatement::Recursive)
          end
        end

        ## Enum Enabled Setup
        if (config = torque_config.enum).enabled
          require_relative 'adapter/oid/enum'
          require_relative 'adapter/oid/enum_set'
          require_relative 'attributes/enum'
          require_relative 'attributes/enum_set'

          Attributes::Enum.include_on(ActiveRecord::Base)
          Attributes::EnumSet.include_on(ActiveRecord::Base)
          ar_type.register(:enum, Adapter::OID::Enum, adapter: :postgresql)
          ar_type.register(:enum_set, Adapter::OID::EnumSet, adapter: :postgresql)

          if config.namespace == false
            # TODO: Allow enum classes to exist without a namespace
            config.namespace = PostgreSQL.const_set('Enum', Module.new)
          else
            config.namespace ||= ::Object.const_set('Enum', Module.new)

            # Define a method to find enumerators based on the namespace
            config.namespace.define_singleton_method(:const_missing) do |name|
              Attributes::Enum.lookup(name)
            end

            # Define a helper method to get a sample value
            config.namespace.define_singleton_method(:sample) do |name|
              Attributes::Enum.lookup(name).sample
            end
          end
        end

        ## Geometry Enabled Setup
        if (config = torque_config.geometry).enabled
          require_relative 'adapter/oid/box'
          require_relative 'adapter/oid/circle'
          require_relative 'adapter/oid/line'
          require_relative 'adapter/oid/segment'

          ar_type.register(:box, Adapter::OID::Box, adapter: :postgresql)
          ar_type.register(:circle, Adapter::OID::Circle, adapter: :postgresql)
          ar_type.register(:line, Adapter::OID::Line, adapter: :postgresql)
          ar_type.register(:segment, Adapter::OID::Segment, adapter: :postgresql)
        end

        ## Period Enabled Setup
        if (config = torque_config.period).enabled
          require_relative 'attributes/period'
          Attributes::Period.include_on(ActiveRecord::Base)
        end

        ## Interval Enabled Setup
        if (config = torque_config.interval).enabled
          require_relative 'adapter/oid/interval'
          ar_type.register(:interval, Adapter::OID::Interval, adapter: :postgresql)
        end

        ## Full Text Search Enabled Setup
        if (config = torque_config.full_text_search).enabled
          require_relative 'attributes/full_text_search'
          Attributes::FullTextSearch.include_on(ActiveRecord::Base)
        end

        ## Arel Setup
        PostgreSQL::Arel.build_operations(torque_config.arel.infix_operators)
        if (mod = torque_config.arel.expose_function_helper_on&.to_s)
          parent, _, name = mod.rpartition('::')

          # +rpartition+ yields an empty string (never nil) when there is
          # no '::' separator, so check for emptiness: "" is truthy and
          # "".constantize would raise a NameError
          parent = parent.empty? ? ::Object : parent.constantize

          raise ArgumentError, <<~MSG.squish if parent.const_defined?(name)
            Unable to expose Arel function helper on #{mod} because the constant
            #{name} is already defined on #{parent}. Please choose a different name.
          MSG

          parent.const_set(name, PostgreSQL::FN)
        end

        ## Versioned Commands Setup
        if (config = torque_config.versioned_commands).enabled
          require_relative 'versioned_commands'
          ActiveRecord::Schema::Definition.include(Adapter::Definition)
        end

        # Make sure to load all the types that are handled by this gem on
        # each individual PG connection
        adapter = ActiveRecord::ConnectionAdapters::PostgreSQLAdapter
        ActiveRecord::Base.connection_handler.each_connection_pool do |pool|
          # +adapter_class+ returns a Class, so use the ancestry check;
          # +is_a?(adapter)+ is always false for a Class object, which
          # silently skipped every pool
          next unless pool.db_config.adapter_class <= adapter
          pool.with_connection { |conn| conn.torque_load_additional_types }
        end
      end
    end
  end
end
end
end
================================================
FILE: lib/torque/postgresql/reflection/abstract_reflection.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Reflection
module AbstractReflection
AREL_ATTR = ::Arel::Attributes::Attribute
AREL_NODE = ::Arel::Nodes::Node
# Regular reflections are never connected through an array-like foreign
# key; array-capable reflections override this
def connected_through_array?
false
end
# Connection through an array-like attribute is more complex than just
# a simple eq. This needs to go through the channel that handles larger
# situations
def join_scope(table, foreign_table, foreign_klass)
return super unless connected_through_array?
table_md = ActiveRecord::TableMetadata.new(klass, table)
predicate_builder = klass.predicate_builder.with(table_md)
scope_chain_items = join_scopes(table, predicate_builder)
klass_scope = klass_join_scope(table, predicate_builder)
# Attach the array-aware id constraint, then fold in the scope chain
klass_scope.where!(build_id_constraint_between(table, foreign_table))
scope_chain_items.inject(klass_scope, &:merge!)
end
# Manually build the join constraint, adding the STI type condition
# when the class requires one
def build_join_constraint(table, foreign_table)
result = build_id_constraint_between(table, foreign_table)
result = table.create_and([result, klass.send(:type_condition, table)]) \
if klass.finder_needs_type_condition?
result
end
private
# This one is a lot simpler, now that we have a predicate builder that
# knows exactly what to do with 2 array-like attributes
def build_id_constraint_between(table, foreign_table)
PredicateBuilder::ArelAttributeHandler.call(
table[join_primary_key],
foreign_table[join_foreign_key],
)
end
end
::ActiveRecord::Reflection::AbstractReflection.prepend(AbstractReflection)
end
end
end
================================================
FILE: lib/torque/postgresql/reflection/association_reflection.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Reflection
module AssociationReflection
# Guard against an unsupported combination of options: array-connected
# associations cannot be polymorphic
def initialize(name, scope, options, active_record)
super
raise ArgumentError, <<-MSG.squish if options[:array] && options[:polymorphic]
Associations can't be connected through an array at the same time they are
polymorphic. Please choose one of the options.
MSG
end
private
# Check if the foreign key should be pluralized (array-connected
# collections store a list of ids, e.g. +tag_ids+)
def derive_foreign_key(*, **)
result = super
result = ActiveSupport::Inflector.pluralize(result) \
if collection? && connected_through_array?
result
end
# returns either +nil+ or the inverse association name that it finds.
def automatic_inverse_of
return super unless connected_through_array?
if can_find_inverse_of_automatically?(self)
# Array-connected inverses are guessed as the pluralized, underscored
# owner name (or the :as option when provided)
inverse_name = options[:as] || active_record.name.demodulize
inverse_name = ActiveSupport::Inflector.underscore(inverse_name)
inverse_name = ActiveSupport::Inflector.pluralize(inverse_name)
inverse_name = inverse_name.to_sym
begin
reflection = klass._reflect_on_association(inverse_name)
rescue NameError
# Give up: we couldn't compute the klass type so we won't be able
# to find any associations either.
reflection = false
end
return inverse_name if valid_inverse_reflection?(reflection)
end
end
end
::ActiveRecord::Reflection::AssociationReflection.prepend(AssociationReflection)
end
end
end
================================================
FILE: lib/torque/postgresql/reflection/belongs_to_many_reflection.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Reflection
# Reflection for the belongs_to_many association, where the owner holds
# an array column with the ids of the associated records
class BelongsToManyReflection < ::ActiveRecord::Reflection::AssociationReflection
def macro
:belongs_to_many
end
# This association is always connected through an array column
def connected_through_array?
true
end
# It behaves like a belongs_to (the owner holds the keys)...
def belongs_to?
true
end
# ...but returns a collection of records
def collection?
true
end
# The association class responsible for loading the records
def association_class
Associations::BelongsToManyAssociation
end
# The array column on the owner holding the associated ids
def foreign_key
@foreign_key ||= options[:foreign_key]&.to_s || derive_foreign_key.freeze
end
def association_foreign_key
@association_foreign_key ||= foreign_key
end
# The primary key on the associated records' table
def active_record_primary_key
@active_record_primary_key ||= options[:primary_key]&.to_s || derive_primary_key
end
def join_primary_key(*)
active_record_primary_key
end
def join_foreign_key
foreign_key
end
# The arel attribute for the array of ids on the owner's table
def array_attribute
active_record.arel_table[foreign_key]
end
private
def derive_primary_key
klass.primary_key
end
# Defaults to "<singular name>_ids" (e.g. tags => tag_ids)
def derive_foreign_key
"#{name.to_s.singularize}_ids"
end
end
::ActiveRecord::Reflection.const_set(:BelongsToManyReflection, BelongsToManyReflection)
# Allow automatic inverse lookup to consider belongs_to_many macros when
# the Rails version exposes the constant
reflection_class = ::ActiveRecord::Reflection::AssociationReflection
reflection_class::VALID_AUTOMATIC_INVERSE_MACROS.push(:belongs_to_many) \
if reflection_class.const_defined?('VALID_AUTOMATIC_INVERSE_MACROS')
end
end
end
================================================
FILE: lib/torque/postgresql/reflection/has_many_reflection.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Reflection
module HasManyReflection
# A has_many association is connected through an array when the
# +array: true+ option is given
def connected_through_array?
options[:array]
end
# The arel attribute holding the array of ids on the associated table
def array_attribute
klass.arel_table[foreign_key]
end
end
::ActiveRecord::Reflection::HasManyReflection.include(HasManyReflection)
end
end
end
================================================
FILE: lib/torque/postgresql/reflection/runtime_reflection.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Reflection
module RuntimeReflection
# Forward the gem-specific reflection API to the wrapped reflection so
# runtime reflections behave like the originals
delegate :klass, :active_record, :connected_through_array?, :macro, :name,
:array_attribute, to: :@reflection
end
::ActiveRecord::Reflection::RuntimeReflection.include(RuntimeReflection)
end
end
end
================================================
FILE: lib/torque/postgresql/reflection/through_reflection.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Reflection
module ThroughReflection
# Through associations answer the array-related API with whatever the
# source reflection reports
delegate :build_id_constraint, :connected_through_array?, to: :source_reflection
end
::ActiveRecord::Reflection::ThroughReflection.include(ThroughReflection)
end
end
end
================================================
FILE: lib/torque/postgresql/reflection.rb
================================================
# frozen_string_literal: true
require_relative 'reflection/abstract_reflection'
require_relative 'reflection/association_reflection'
require_relative 'reflection/belongs_to_many_reflection'
require_relative 'reflection/has_many_reflection'
require_relative 'reflection/runtime_reflection'
require_relative 'reflection/through_reflection'
module Torque
module PostgreSQL
module Reflection
  # Intercept reflection creation: :belongs_to_many builds this gem's
  # dedicated reflection class; everything else goes through ActiveRecord
  def create(macro, name, scope, options, ar)
    return BelongsToManyReflection.new(name, scope, options, ar) if macro == :belongs_to_many
    super
  end
end

::ActiveRecord::Reflection.singleton_class.prepend(Reflection)
end
end
================================================
FILE: lib/torque/postgresql/relation/auxiliary_statement.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Relation
module AuxiliaryStatement
# :nodoc:
def auxiliary_statements_values
@values.fetch(:auxiliary_statements, FROZEN_EMPTY_ARRAY)
end
# :nodoc:
def auxiliary_statements_values=(value)
assert_modifiable!
@values[:auxiliary_statements] = value
end
# Set use of an auxiliary statement (CTE) on a spawned relation
def with(*args, **settings)
spawn.with!(*args, **settings)
end
# Like #with, but modifies relation in place.
def with!(*args, **settings)
instantiate_auxiliary_statements(*args, **settings)
self
end
alias_method :auxiliary_statements, :with
alias_method :auxiliary_statements!, :with!
# Get all auxiliary statements bound attributes and the base bound
# attributes as well, by visiting the final AST with a bind collector
def bound_attributes
visitor = ::Arel::Visitors::PostgreSQL.new(ActiveRecord::Base.connection)
visitor.accept(self.arel.ast, ::Arel::Collectors::Composite.new(
::Arel::Collectors::SQLString.new,
::Arel::Collectors::Bind.new,
)).value.last
end
private
# Hook arel build to prepend the WITH (optionally RECURSIVE) clause
def build_arel(*)
arel = super
type = auxiliary_statement_type
sub_queries = build_auxiliary_statements(arel)
sub_queries.nil? ? arel : arel.with(*type, *sub_queries)
end
# Instantiate one or more auxiliary statements for the given +klass+
def instantiate_auxiliary_statements(*args, **options)
klass = PostgreSQL::AuxiliaryStatement
klass = klass::Recursive if options.delete(:recursive).present?
self.auxiliary_statements_values += args.map do |table|
if table.is_a?(Class) && table < klass
table.new(**options)
else
# Non-class arguments resolve to statements configured on the model
klass.instantiate(table, self, **options)
end
end
end
# Build all necessary data for auxiliary statements, collecting any
# extra join sources into the main arel
def build_auxiliary_statements(arel)
return unless auxiliary_statements_values.present?
auxiliary_statements_values.map do |klass|
klass.build(self).tap { arel.join_sources.concat(klass.join_sources) }
end
end
# Return :recursive if any auxiliary statement is recursive
def auxiliary_statement_type
klass = PostgreSQL::AuxiliaryStatement::Recursive
:recursive if auxiliary_statements_values.any?(klass)
end
# Throw an error showing that an auxiliary statement of the given
# table name isn't defined
def auxiliary_statement_error(name)
raise ArgumentError, <<-MSG.squish
There's no '#{name}' auxiliary statement defined for #{self.class.name}.
MSG
end
end
end
end
end
================================================
FILE: lib/torque/postgresql/relation/buckets.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module Relation
module Buckets
# :nodoc:
def buckets_value
  @values[:buckets]
end
# :nodoc:
# Store the bucket definition, after ensuring the relation can still be
# modified
def buckets_value=(value)
assert_modifiable!
@values[:buckets] = value
end
# Specifies how to bucket records. It works for both the calculations
# or just putting records into groups. For example:
#
# User.buckets(:created_at, [1.year.ago, 1.month.ago, 1.week.ago])
# # Returns all users grouped by created_at in the given time ranges
#
# User.buckets(:age, 0..100, step: 10).count
# # Counts all users grouped by age buckets of 10 years
def buckets(*value, **xargs)
spawn.buckets!(*value, **xargs)
end
# Like #buckets, but modifies relation in place.
def buckets!(attribute, values, count: nil, cast: nil, as: nil)
  unless values.is_a?(Array) || values.is_a?(Range)
    raise ArgumentError, <<~MSG.squish
      Buckets must be an array or a range.
    MSG
  end

  # Ranges default to a single bucket unless a count is provided
  count ||= 1 if values.is_a?(Range)
  attribute = arel_table[attribute] unless ::Arel.arel_node?(attribute)
  self.buckets_value = [attribute, values, count, cast, as]
  self
end
# When performing calculations with buckets, this method add a grouping
# clause to the query by the bucket values, and then adjust the keys
# to match provided values
def calculate(*)
return super if buckets_value.blank?
raise ArgumentError, <<~MSG.squish if group_values.present?
Cannot calculate with buckets when there are already group values.
MSG
keys = buckets_keys
self.group_values = [FN.group_by(build_buckets_node, :bucket)]
super.transform_keys { |key| keys[key - 1] }
end
module Initializer
# Hook into the output of records to make sure we group by the buckets
def records
return super if buckets_value.blank?
keys = buckets_keys
col = buckets_column
super.group_by do |record|
val = (record[col] || 0) - 1
keys[val] if val >= 0 && val < keys.size
end
end
end
private
# Hook arel build to add the column
def build_arel(*)
return super if buckets_value.blank? || select_values.present?
self.select_extra_values += [build_buckets_node.as(buckets_column)]
super
end
# Build the Arel node for the buckets function
def build_buckets_node
attribute, values, count, cast, * = buckets_value
if values.is_a?(Range)
FN.width_bucket(
attribute,
FN.bind_type(values.begin, name: 'bucket_start', cast: 'numeric'),
FN.bind_type(values.end, name: 'bucket_end', cast: 'numeric'),
FN.bind_type(count, name: 'bucket_count', cast: 'integer'),
)
else
FN.width_bucket(attribute, ::Arel.array(values, cast: cast))
end
end
# Returns the column used for buckets, if any
def buckets_column
buckets_value.last&.to_s || 'bucket'
end
# Transform a range into the proper keys for buckets
def buckets_keys
keys = buckets_value.second
return keys unless keys.is_a?(Range)
left = nil
step = buckets_value.third
step = (keys.end - keys.begin).fdiv(step)
step = step.to_i if step.to_i == step
keys.step(step).each_with_object([]) do |right, result|
next left = right if left.nil?
start, left = left, right
result << Range.new(start, left, true)
end
end
end
Initializer.include(Buckets::Initializer)
end
end
end
================================================
FILE: lib/torque/postgresql/relation/distinct_on.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    module Relation
      # Adds support for PostgreSQL's DISTINCT ON clause to relations
      module DistinctOn

        # :nodoc:
        def distinct_on_values
          @values.fetch(:distinct_on, FROZEN_EMPTY_ARRAY)
        end

        # :nodoc:
        def distinct_on_values=(value)
          assert_modifiable!
          @values[:distinct_on] = value
        end

        # Specifies whether the records should be unique or not by a given set
        # of fields. For example:
        #
        #   User.distinct_on(:name)
        #   # Returns 1 record per distinct name
        #
        #   User.distinct_on(:name, :email)
        #   # Returns 1 record per distinct name and email
        #
        #   User.distinct_on(false)
        #   # You can also remove the uniqueness
        def distinct_on(*value)
          spawn.distinct_on!(*value)
        end

        # Like #distinct_on, but modifies relation in place.
        def distinct_on!(*value)
          self.distinct_on_values = value
          self
        end

        private

          # Hook arel build to append the DISTINCT ON clause whenever any
          # column was registered on this relation
          def build_arel(*)
            super.tap do |arel|
              columns = distinct_on_values
              arel.distinct_on(resolve_column(columns)) if columns.present?
            end
          end

      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/relation/inheritance.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    module Relation
      # Query-level support for PostgreSQL table inheritance: restricting
      # results to the parent table and casting rows to their concrete models
      module Inheritance

        # :nodoc: List of dependent model classes to cast records into
        def cast_records_values
          @values.fetch(:cast_records, FROZEN_EMPTY_ARRAY)
        end

        # :nodoc:
        def cast_records_values=(value)
          assert_modifiable!
          @values[:cast_records] = value
        end

        # :nodoc: True when the query should use ONLY (no inherited tables)
        def itself_only_value
          @values.fetch(:itself_only, nil)
        end

        # :nodoc:
        def itself_only_value=(value)
          assert_modifiable!
          @values[:itself_only] = value
        end

        delegate :quote_table_name, :quote_column_name, to: :connection

        # Specify that the results should come only from the table that the
        # entries were created on. For example:
        #
        #   Activity.itself_only
        #   # Does not return entries for inherited tables
        def itself_only
          spawn.itself_only!
        end

        # Like #itself_only, but modifies relation in place.
        def itself_only!(*)
          self.itself_only_value = true
          self
        end

        # Enables the casting of all returned records. The result will include
        # all the information needed to instantiate the inherited models
        #
        #   Activity.cast_records
        #   # The result list will have many different classes, for all
        #   # inherited models of activities
        def cast_records(*types, **options)
          spawn.cast_records!(*types, **options)
        end

        # Like #cast_records, but modifies relation in place.
        # With +filter: true+, also restricts rows to the given types; the
        # types default to all of the model's casted dependents
        def cast_records!(*types, **options)
          where!(regclass.pg_cast(:varchar).in(types.map(&:table_name))) if options[:filter]
          self.select_extra_values += [regclass.as(_record_class_attribute.to_s)]
          self.cast_records_values = (types.present? ? types : model.casted_dependents.values)
          self
        end

        private

          # Hook arel build to add any necessary table
          def build_arel(*)
            arel = super
            arel.only if self.itself_only_value === true
            build_inheritances(arel)
            arel
          end

          # Build all necessary data for inheritances: joins each dependent
          # table and projects the columns the base model doesn't have
          def build_inheritances(arel)
            return if self.cast_records_values.empty?

            mergeable = inheritance_mergeable_attributes
            columns = build_inheritances_joins(arel, self.cast_records_values)
            columns = columns.map do |column, arel_tables|
              # Column exists on a single dependent table: select it directly
              next arel_tables.first[column] if arel_tables.size == 1

              if mergeable.include?(column)
                # Same-typed column on several tables: collapse via COALESCE
                # (each_with_object(column).map(&:[]) => [table[column], ...])
                FN.coalesce(*arel_tables.each_with_object(column).map(&:[])).as(column)
              else
                # Conflicting columns get a table-prefixed alias each
                arel_tables.map { |table| table[column].as("#{table.left.name}__#{column}") }
              end
            end

            columns.push(build_auto_caster_marker(arel, self.cast_records_values))
            self.select_extra_values += columns.flatten if columns.any?
          end

          # Build as many left outer join as necessary for each dependent table
          # Returns a hash of column name => [aliased join tables having it]
          def build_inheritances_joins(arel, types)
            columns = Hash.new{ |h, k| h[k] = [] }
            base_on_key = model.arel_table[primary_key]
            base_attributes = model.attribute_names

            # Iterate over each casted dependent calculating the columns
            types.each.with_index do |model, idx|
              join_table = model.arel_table.alias("\"i_#{idx}\"")
              arel.outer_join(join_table).on(base_on_key.eq(join_table[primary_key]))

              # Only attributes absent from the base model are pulled from
              # the joined dependent table
              (model.attribute_names - base_attributes).each do |column|
                columns[column] << join_table
              end
            end

            # Return the list of needed columns
            columns.default_proc = nil
            columns
          end

          # Project a varchar marker of the concrete source table (when it is
          # one of the casted types), used to auto-cast fetched records
          def build_auto_caster_marker(arel, types)
            attribute = regclass.pg_cast(:varchar).in(types.map(&:table_name))
            attribute.as(self.class._auto_cast_attribute.to_s)
          end

          # PostgreSQL's tableoid system column cast to regclass, which
          # identifies the concrete table each row came from
          def regclass
            arel_table['tableoid'].pg_cast(:regclass)
          end

      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/relation/join_series.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    module Relation
      # Joins a relation against PostgreSQL's +generate_series()+ set function
      module JoinSeries
        # Create the proper arel join
        class << self
          # Assemble the generate_series() call, its alias, and the ON
          # condition, returning the proper Arel join node
          def build(relation, range, with: nil, as: :series, step: nil, time_zone: nil, cast: nil, mode: :inner, &block)
            validate_build!(range, step)

            args = [bind_value(range.begin), bind_value(range.end)]
            args << bind_value(step) if step
            args << bind_value(time_zone) if time_zone

            result = Arel::Nodes::Ref.new(as.to_s)
            func = FN.generate_series(*args).as(as.to_s)
            condition = build_join_on(result, relation, with, cast, &block)
            arel_join(mode).new(func, func.create_on(condition))
          end

          private

            # Make sure we have a viable range: bounded on both ends, and
            # with an explicit :step unless the bounds are numeric (where PG
            # can default the step)
            def validate_build!(range, step)
              raise ArgumentError, <<~MSG.squish unless range.is_a?(Range)
                Value must be a Range.
              MSG

              raise ArgumentError, <<~MSG.squish if range.begin.nil?
                Beginless Ranges are not supported.
              MSG

              raise ArgumentError, <<~MSG.squish if range.end.nil?
                Endless Ranges are not supported.
              MSG

              raise ArgumentError, <<~MSG.squish if !range.begin.is_a?(Numeric) && step.nil?
                missing keyword: :step
              MSG
            end

            # Creates the proper bind value with an explicit SQL cast.
            # Branch order matters: TimeWithZone before Time, DateTime before
            # Date (DateTime is a Date subclass); plain Dates are promoted to
            # UTC times; Arel attributes pass through untouched
            def bind_value(value)
              case value
              when Integer
                FN.bind_type(value, :integer, name: 'series', cast: 'integer')
              when Float
                FN.bind_type(value, :float, name: 'series', cast: 'numeric')
              when String
                FN.bind_type(value, :string, name: 'series', cast: 'text')
              when ActiveSupport::TimeWithZone
                FN.bind_type(value, :time, name: 'series', cast: 'timestamptz')
              when Time
                FN.bind_type(value, :time, name: 'series', cast: 'timestamp')
              when DateTime
                FN.bind_type(value, :datetime, name: 'series', cast: 'timestamp')
              when ActiveSupport::Duration
                FN.bind_type(value.iso8601, :string, name: 'series', cast: 'interval')
              when Date then bind_value(value.to_time(:utc))
              when ::Arel::Attributes::Attribute then value
              else
                raise ArgumentError, "Unsupported value type: #{value.class}"
              end
            end

            # Get the class of the join on arel for the requested mode
            def arel_join(mode)
              case mode.to_sym
              when :inner then ::Arel::Nodes::InnerJoin
              when :left then ::Arel::Nodes::OuterJoin
              when :right then ::Arel::Nodes::RightOuterJoin
              when :full then ::Arel::Nodes::FullOuterJoin
              else
                raise ArgumentError, <<-MSG.squish
                  The '#{mode}' is not implemented as a join type.
                MSG
              end
            end

            # Build the join on clause. A block takes precedence and receives
            # the series reference and the relation's arel table; otherwise
            # +with+ names (or is) the column compared against the series,
            # with both sides optionally cast
            def build_join_on(result, relation, with, cast)
              raise ArgumentError, <<~MSG.squish if with.nil? && !block_given?
                missing keyword: :with
              MSG

              return yield(result, relation.arel_table) if block_given?

              with = relation.arel_table[with.to_s] if with.is_a?(Symbol)
              with = with.pg_cast(cast) if cast && with.respond_to?(:pg_cast)
              (cast ? result.pg_cast(cast) : result).eq(with)
            end
        end

        # Creates a new join based on PG +generate_series()+ function. It is
        # based on ranges, supports numbers and dates (as per PG documentation),
        # custom stepping, time zones, and more. This simply coordinates the
        # initialization of the proper join
        def join_series(range, **xargs, &block)
          spawn.join_series!(range, **xargs, &block)
        end

        # Like #join_series, but modifies relation in place.
        # (|= keeps an identical join from being added twice)
        def join_series!(range, **xargs, &block)
          self.joins_values |= [JoinSeries.build(self, range, **xargs, &block)]
          self
        end
      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/relation/merger.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    module Relation
      # Hooks into ActiveRecord's relation merger so the extra query values
      # introduced by this extension survive +relation.merge(other)+
      module Merger

        def merge # :nodoc:
          super
          merge_select_extra
          merge_distinct_on
          merge_auxiliary_statements
          merge_inheritance
          merge_buckets
          relation
        end

        private

          # Merge extra select columns, dropping duplicates
          # NOTE(review): +concat+ mutates the receiver; when the target
          # relation has no :select_extra entry yet, this receives the shared
          # frozen empty array — confirm merge always runs after assignment
          def merge_select_extra
            relation.select_extra_values.concat(other.select_extra_values).uniq! \
              if other.select_extra_values.present?
          end

          # Merge distinct on columns
          def merge_distinct_on
            return unless relation.is_a?(Relation::DistinctOn)
            return if other.distinct_on_values.blank?

            relation.distinct_on_values += other.distinct_on_values
          end

          # Merge auxiliary statements activated by +with+, skipping any
          # statement whose class is already present on the target relation
          def merge_auxiliary_statements
            return unless defined?(Relation::AuxiliaryStatement) && relation.is_a?(Relation::AuxiliaryStatement)
            return if other.auxiliary_statements_values.blank?

            # FIX: the block parameter was previously named +other+, which
            # shadowed the merger's +other+ relation inside the block
            current = relation.auxiliary_statements_values.map(&:class)
            other.auxiliary_statements_values.each do |statement|
              next if current.include?(statement.class)

              relation.auxiliary_statements_values += [statement]
              current << statement.class
            end
          end

          # Merge settings related to inheritance tables
          def merge_inheritance
            return unless relation.is_a?(Relation::Inheritance)

            relation.itself_only_value = true if other.itself_only_value.present?

            if other.cast_records_values.present?
              relation.cast_records_values += other.cast_records_values
              relation.cast_records_values.uniq!
            end
          end

          # Merge settings related to buckets (the merged relation wins)
          def merge_buckets
            return unless defined?(Relation::Buckets) && relation.is_a?(Relation::Buckets)
            return if other.buckets_value.blank?

            relation.buckets_value = other.buckets_value
          end

      end

      ActiveRecord::Relation::Merger.prepend Merger
    end
  end
end
================================================
FILE: lib/torque/postgresql/relation.rb
================================================
# frozen_string_literal: true
require_relative 'relation/distinct_on'
require_relative 'relation/inheritance'
require_relative 'relation/merger'
module Torque
module PostgreSQL
module Relation
extend ActiveSupport::Concern
include DistinctOn
include Inheritance
# Query value keys that hold a single value
SINGLE_VALUE_METHODS = %i[itself_only buckets]

# Query value keys that hold a list of values
MULTI_VALUE_METHODS = %i[
  select_extra distinct_on auxiliary_statements cast_records
]

# All query value keys registered by this extension
VALUE_METHODS = SINGLE_VALUE_METHODS + MULTI_VALUE_METHODS

# Shortcuts to ActiveRecord internals used throughout this module
FROZEN_EMPTY_ARRAY = ::ActiveRecord::QueryMethods::FROZEN_EMPTY_ARRAY
ARColumn = ::ActiveRecord::ConnectionAdapters::PostgreSQL::Column
# :nodoc: Extra expressions appended to the query projection on top of the
# regular select values
def select_extra_values
  @values.fetch(:select_extra, FROZEN_EMPTY_ARRAY)
end

# :nodoc:
def select_extra_values=(value)
  assert_modifiable!
  @values[:select_extra] = value
end
# Resolve column name when calculating models, allowing the column name to
# be more complex while keeping the query selection quality
def calculate(operation, column_name)
  resolved = column_name.is_a?(Hash) ? resolve_column(column_name).first : column_name
  super(operation, resolved)
end
# Resolve column definition up to second value.
# For example, based on Post model:
#
#   resolve_column(['name', :title])
#   # Returns ['name', '"posts"."title"']
#
#   resolve_column([:title, {authors: :name}])
#   # Returns ['"posts"."title"', '"authors"."name"']
#
#   resolve_column([{authors: [:name, :age]}])
#   # Returns ['"authors"."name"', '"authors"."age"']
#
# Raises ArgumentError for unsupported entries or a Hash nested too deep
def resolve_column(list, base = false)
  base = resolve_base_table(base)

  Array.wrap(list).map do |item|
    case item
    when String
      ::Arel.sql(klass.send(:sanitize_sql, item.to_s))
    when Symbol
      base ? base.arel_table[item] : klass.arel_table[item]
    when Array
      resolve_column(item, base)
    when Hash
      raise ArgumentError, 'Unsupported Hash for attributes on third level' if base
      item.map { |key, other_list| resolve_column(other_list, key) }
    else
      # FIX: this referenced an undefined local +value+, which turned the
      # intended ArgumentError into a NameError at runtime
      raise ArgumentError, "Unsupported argument type: #{item} (#{item.class})"
    end
  end.flatten
end

# Get the TableMetadata from a relation
def resolve_base_table(relation)
  return unless relation

  table = predicate_builder.send(:table)
  if table.associated_with?(relation.to_s)
    table.associated_table(relation.to_s).send(:klass)
  else
    raise ArgumentError, "Relation for #{relation} not found on #{klass}"
  end
end
# Serialize the given value so it can be used in a condition that involves
# the given column. Accepts either a column object or a column name that is
# looked up in the model's columns hash
def cast_for_condition(column, value)
  column = columns_hash[column.to_s] unless column.is_a?(ARColumn)
  caster = connection.lookup_cast_type_from_column(column)
  connection.type_cast(caster.serialize(value))
end
private
# Hook arel build to project the extra select expressions, but only when
# no explicit select list was provided
def build_arel(*)
  super.tap do |arel|
    arel.project(*select_extra_values) if select_values.blank?
  end
end
class_methods do
  # Easy and storable way to access the name used to get the record table
  # name when using inheritance tables
  # NOTE(review): @@record_class is a class variable shared across every
  # includer — presumably fine since the config value is global; confirm
  def _record_class_attribute
    @@record_class ||= Torque::PostgreSQL.config
      .inheritance.record_class_column_name.to_sym
  end

  # Easy and storable way to access the name used to get the indicate of
  # auto casting inherited records
  def _auto_cast_attribute
    @@auto_cast ||= Torque::PostgreSQL.config
      .inheritance.auto_cast_column_name.to_sym
  end
end
# When a relation is created, force the attributes to be defined,
# because the type mapper may add new methods to the model. This happens
# for the given model Klass and its inheritances
module Initializer
  def initialize(klass, *, **)
    super
    # Only walk up when attribute methods were actually (re)defined, and
    # stop at ActiveRecord::Base and abstract classes
    klass.superclass.send(:relation) if klass.define_attribute_methods &&
      klass.superclass != ActiveRecord::Base && !klass.superclass.abstract_class?
  end

  # Allow extra keyword arguments to be sent to +InsertAll+
  # Defaults the conflict behavior to :update (a true upsert)
  def upsert_all(attributes, **xargs)
    xargs = xargs.reverse_merge(on_duplicate: :update)
    ::ActiveRecord::InsertAll.execute(self, attributes, **xargs)
  end
end
end
# Include the methods here provided and then change the constants to ensure
# the operation of ActiveRecord Relation
ActiveRecord::Relation.include Relation
ActiveRecord::Relation.prepend Relation::Initializer

# Register the new query value keys and allow all of them to be unscoped
ActiveRecord::Relation::SINGLE_VALUE_METHODS.concat(Relation::SINGLE_VALUE_METHODS)
ActiveRecord::Relation::MULTI_VALUE_METHODS.concat(Relation::MULTI_VALUE_METHODS)
ActiveRecord::Relation::VALUE_METHODS.concat(Relation::VALUE_METHODS)
ActiveRecord::QueryMethods::VALID_UNSCOPING_VALUES.merge(%i[cast_records itself_only
  distinct_on auxiliary_statements buckets])
end
end
================================================
FILE: lib/torque/postgresql/schema_cache/bound_schema_reflection.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    # Extends ActiveRecord's BoundSchemaReflection with the extra
    # inheritance-related lookups provided by this gem. Each call forwards
    # to the underlying schema reflection together with the bound source
    module BoundSchemaReflection
      def add_model_name(table_name, model)
        @schema_reflection.add_model_name(torque_reflection_source, table_name, model)
      end

      def dependencies(table_name)
        @schema_reflection.dependencies(torque_reflection_source, table_name)
      end

      def associations(table_name)
        @schema_reflection.associations(torque_reflection_source, table_name)
      end

      def lookup_model(table_name, scoped_class = '')
        @schema_reflection.lookup_model(torque_reflection_source, table_name, scoped_class)
      end

      private

        # Newer Rails versions bind reflections to a pool while older ones
        # bind to a connection; pick whichever this instance carries.
        # (Extracted to remove the same ternary repeated in every method;
        # prefixed name avoids clashing with the host class)
        def torque_reflection_source
          defined?(@pool) ? @pool : @connection
        end
    end

    ActiveRecord::ConnectionAdapters::BoundSchemaReflection.prepend BoundSchemaReflection
  end
end
================================================
FILE: lib/torque/postgresql/schema_cache/inheritance.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    module SchemaCache
      # Shared logic to resolve which model class maps to a given (possibly
      # schema-qualified) table name, taking namespaces into account
      module Inheritance

        # Try to find a model based on a given table.
        # Successful lookups are memoized into +source_to_model+; raises
        # LookupError when no matching model can be found
        def lookup_model(table_name, scoped_class = '', source_to_model:)
          scoped_class = scoped_class.name if scoped_class.is_a?(Class)
          return source_to_model[table_name] if source_to_model.key?(table_name)

          # Get all the possible scopes (each CamelCase word of the scoped
          # class name), always rooted at Object
          scopes = scoped_class.scan(/(?:::)?[A-Z][a-z]+/)
          scopes.unshift('Object::')

          # Check if the table name comes with a schema; a non-public schema
          # is also considered as a candidate namespace
          if table_name.include?('.')
            schema, table_name = table_name.split('.')
            scopes.insert(1, schema.camelize) if schema != 'public'
          end

          # Consider the maximum namespaced possible model name
          # (e.g. "user_posts" => ["User", "::", "Post"])
          max_name = table_name.tr('_', '/').camelize.split(/(::)/)
          max_name[-1] = max_name[-1].singularize

          # Test all the possible names against all the possible scopes,
          # dropping the innermost scope on each pass
          until scopes.size == 0
            scope = scopes.join.chomp('::').safe_constantize
            model = find_model(max_name, table_name, scope) unless scope.nil?
            return source_to_model[table_name] = model unless model.nil?
            scopes.pop
          end

          # If this part is reach, no model name was found
          raise LookupError.new(<<~MSG.squish)
            Unable to find a valid model that is associated with the
            '#{table_name}' table. Please, check if they correctly inherit from
            ActiveRecord::Base
          MSG
        end

        protected

          # Find a model by a given max namespaced class name that matches the
          # given table name, trying every combination of turning the '::'
          # separators on and off (e.g. User::Post vs UserPost)
          def find_model(max_name, table_name, scope = Object)
            pieces = max_name.is_a?(::Array) ? max_name : max_name.split(/(::)/)
            # Odd indexes of +pieces+ hold the '::' separator slots
            ns_places = (1..(max_name.size - 1)).step(2).to_a

            # Generate all possible combinations; inverse_lookup decides
            # whether flat names are tried before deeply namespaced ones
            conditions = []
            range = Torque::PostgreSQL.config.inheritance.inverse_lookup \
              ? 0.upto(ns_places.size) \
              : ns_places.size.downto(0)
            range.each do |size|
              conditions.concat(ns_places.combination(size).to_a)
            end

            # Now iterate over
            while (condition = conditions.shift)
              ns_places.each do |i|
                pieces[i] = condition.include?(i) ? '::' : ''
              end

              candidate = pieces.join
              candidate.prepend("#{scope.name}::") unless scope === Object
              klass = candidate.safe_constantize
              next if klass.nil?

              # Check if the class match the table name
              return klass if klass < ::ActiveRecord::Base &&
                klass.table_name == table_name
            end
          end

          # Calculates the inverted dependency (association), where even indirect
          # inheritance comes up in the list
          def generate_associations(inheritance_dependencies)
            return {} if inheritance_dependencies.empty?

            result = Hash.new{ |h, k| h[k] = [] }
            masters = inheritance_dependencies.values.flatten.uniq

            # Add direct associations
            masters.map do |master|
              inheritance_dependencies.each do |(dependent, associations)|
                result[master] << dependent if associations.include?(master)
              end
            end

            # Add indirect associations
            # NOTE(review): +children+ is extended while being iterated —
            # that is how transitive dependents get picked up; confirm this
            # ordering-dependent behavior is intended
            result.each do |master, children|
              children.each do |child|
                children.concat(result[child]).uniq! if result.key?(child)
              end
            end

            # Remove the default proc that would create new entries
            result.default_proc = nil
            result
          end

          # Parse the Torque config into the proper hash of irregular models.
          # This is smart enough to only load necessary models
          def prepare_irregular_models(data_sources)
            entries = Torque::PostgreSQL.config.irregular_models
            entries.slice(*data_sources).each_with_object({}) do |(table, model), hash|
              hash[table] = model.is_a?(Class) ? model : model.constantize
            end
          end

      end
    end
  end
end
================================================
FILE: lib/torque/postgresql/schema_cache/schema_reflection.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    # Extends ActiveRecord's SchemaReflection: each lookup resolves the
    # cache for the given source and forwards the call to it
    module SchemaReflection
      def add_model_name(source, table_name, model)
        cache(source).add_model_name(source, table_name, model)
      end

      def dependencies(source, table_name)
        cache(source).dependencies(source, table_name)
      end

      def associations(source, table_name)
        cache(source).associations(source, table_name)
      end

      # NOTE(review): unlike the methods above, the source is not forwarded
      # here — the cache resolves the model purely from its memoized data;
      # confirm that asymmetry is intentional
      def lookup_model(source, table_name, scoped_class)
        cache(source).lookup_model(table_name, scoped_class)
      end
    end

    ActiveRecord::ConnectionAdapters::SchemaReflection.prepend SchemaReflection
  end
end
================================================
FILE: lib/torque/postgresql/schema_cache.rb
================================================
# frozen_string_literal: true

require 'torque/postgresql/schema_cache/inheritance'
require 'torque/postgresql/schema_cache/schema_reflection'
require 'torque/postgresql/schema_cache/bound_schema_reflection'

module Torque
  module PostgreSQL
    # Raised when no model can be associated with a table name
    LookupError = Class.new(ArgumentError)

    # :TODO: Create the +add+ to load inheritance info
    #
    # Extends ActiveRecord's SchemaCache with inheritance information:
    # table dependencies, (indirect) associations, and table => model names
    module SchemaCache
      include Torque::PostgreSQL::SchemaCache::Inheritance

      def initialize(*) # :nodoc:
        super
        @data_sources_model_names = {}
        @inheritance_dependencies = {}
        @inheritance_associations = {}
        @inheritance_loaded = false
      end

      def initialize_dup(*) # :nodoc:
        super
        @data_sources_model_names = @data_sources_model_names.dup
        @inheritance_dependencies = @inheritance_dependencies.dup
        @inheritance_associations = @inheritance_associations.dup
      end

      def encode_with(coder) # :nodoc:
        super
        coder['data_sources_model_names'] = @data_sources_model_names
        coder['inheritance_dependencies'] = @inheritance_dependencies
        coder['inheritance_associations'] = @inheritance_associations
      end

      def init_with(coder) # :nodoc:
        super
        @data_sources_model_names = coder['data_sources_model_names']
        @inheritance_dependencies = coder['inheritance_dependencies']
        @inheritance_associations = coder['inheritance_associations']
      end

      def add(connection_or_table_name, table_name = connection_or_table_name, *) # :nodoc:
        super

        # Reset inheritance information when a table is added
        if @data_sources.key?(table_name)
          @inheritance_dependencies.clear
          @inheritance_associations.clear
          @inheritance_loaded = false
        end
      end

      def clear! # :nodoc:
        super
        @data_sources_model_names.clear
        @inheritance_dependencies.clear
        @inheritance_associations.clear
        @inheritance_loaded = false
      end

      def size # :nodoc:
        super + [
          @data_sources_model_names,
          @inheritance_dependencies,
          @inheritance_associations,
        ].map(&:size).inject(:+)
      end

      def clear_data_source_cache!(connection_or_name, name = connection_or_name) # :nodoc:
        super
        @data_sources_model_names.delete name
        @inheritance_dependencies.delete name
        @inheritance_associations.delete name
      end

      # The order appended here must stay the exact reverse of the pops in
      # +marshal_load+ below
      def marshal_dump # :nodoc:
        super + [
          @inheritance_dependencies,
          @inheritance_associations,
          @data_sources_model_names,
          @inheritance_loaded,
        ]
      end

      def marshal_load(array) # :nodoc:
        @inheritance_loaded = array.pop
        @data_sources_model_names = array.pop
        @inheritance_associations = array.pop
        @inheritance_dependencies = array.pop
        super
      end

      # A way to manually add models name so it doesn't need the lookup method
      # Accepts (source..., table_name, model); the model is only registered
      # when the data source exists and a Class was given
      def add_model_name(*args)
        model, *source = args.reverse
        return unless data_source_exists?(*source.reverse) && model.is_a?(Class)

        @data_sources_model_names[source.first] = model
      end

      # Get all the tables that the given one inherits from
      # NOTE(review): with a single argument this falls back to +connection+
      # — presumably available on the cache instance; confirm
      def dependencies(source, table_name = source)
        reload_inheritance_data!(source == table_name ? connection : source)
        @inheritance_dependencies[table_name]
      end

      # Get the list of all tables that are associated (direct or indirect
      # inheritance) with the provided one
      def associations(source, table_name = source)
        reload_inheritance_data!(source == table_name ? connection : source)
        @inheritance_associations[table_name]
      end

      # Override the inheritance implementation to pass over the proper cache of
      # the existing association between data sources and model names
      def lookup_model(*args, **xargs)
        super(*args, **xargs, source_to_model: @data_sources_model_names)
      end

      private

        # Reload information about tables inheritance and dependencies, uses a
        # cache to not perform additional checks
        def reload_inheritance_data!(source)
          return if @inheritance_loaded

          source.with_connection do |connection|
            @inheritance_dependencies = connection.inherited_tables
            @inheritance_associations = generate_associations
            @inheritance_loaded = true
          end
        end

        # Calculates the inverted dependency (association), where even indirect
        # inheritance comes up in the list
        def generate_associations
          super(@inheritance_dependencies)
        end

        # Use this method to also load any irregular model name
        def add_all(source = nil)
          super
          data_sources = source.present? ? tables_to_cache(source) : @data_sources.keys
          @data_sources_model_names = prepare_irregular_models(data_sources)
        end
    end

    ActiveRecord::ConnectionAdapters::SchemaCache.prepend SchemaCache
  end
end
================================================
FILE: lib/torque/postgresql/table_name.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    # Decorates a model's table name so it can carry an optional schema
    # prefix; delegates everything else to the composed string
    class TableName < Delegator
      def initialize(klass, table_name)
        @klass = klass
        @table_name = table_name
      end

      # Resolve (and memoize) the schema by walking from the class through
      # its module parents, stopping at the first one defining a non-nil
      # +schema+
      def schema
        return @schema if defined?(@schema)

        @schema = ([@klass] + @klass.module_parents[0..-2]).find do |klass|
          next unless klass.respond_to?(:schema) && !(value = klass.schema).nil?
          break value
        end
      end

      def to_s
        schema.nil? ? @table_name : "#{schema}.#{@table_name}"
      end

      alias __getobj__ to_s

      # Loosely compare with another table name, accepting an optional
      # (quoted) schema prefix. With no resolved schema, any schema from the
      # connection's search path is accepted as the prefix
      def ==(other)
        # FIX: this used `schema | ...` (String does not define `|`, so any
        # resolved schema raised NoMethodError) — `||` falls back to the
        # search-path alternatives only when no schema is resolved
        other.to_s =~ /("?#{schema || search_path_schemes.join('|')}"?\.)?"?#{@table_name}"?/
      end

      def __setobj__(value)
        @table_name = value
      end

      private

        # FIX: used to call a bare +klass+, which is not defined here; the
        # class is stored in the @klass instance variable
        def search_path_schemes
          @klass.connection.schemas_search_path_sanitized
        end
    end
  end
end
================================================
FILE: lib/torque/postgresql/version.rb
================================================
# frozen_string_literal: true

module Torque
  module PostgreSQL
    # Gem version
    VERSION = '4.0.1'
  end
end
================================================
FILE: lib/torque/postgresql/versioned_commands/command_migration.rb
================================================
# frozen_string_literal: true
module Torque
module PostgreSQL
module VersionedCommands
module Migration
# Split the trailing command object off the regular migration arguments
def initialize(*args)
  *migration_args, @command = args
  super(*migration_args)
end
# Prepare the description based on the direction
# (+@description+ is later consumed by +announce+)
def migrate(direction)
  @description = description_for(direction)
  super
end
# Uses the command to execute the proper action, keeping the connection
# assigned only for the duration of the run (cleared even on failure)
def exec_migration(conn, direction)
  @connection = conn
  direction == :up ? @command.up : @command.down
ensure
  @connection = nil
  @execution_strategy = nil
end
# Better formatting of the output: prints the command's version, a title
# built from its type/name/version, and the action (with timing when the
# original message carried one), padded with '=' up to a fixed width
def announce(message)
  running, finished = @description
  title = "#{@command.type.capitalize} #{@command.object_name} v#{@command.op_version}"
  timing = message.split(' ', 2).second
  action = timing.present? ? "#{finished} #{timing}" : running
  text = "#{@command.version} #{title}: #{action}"
  write format("== %s %s", text, "=" * [0, 75 - text.length].max)
end
# Produces a nice description of what is being done, as a pair of
# [present participle, past participle] for the command's operation
def description_for(direction)
  stem =
    if direction == :up
      @command.op.chomp('e')
    else
      case @command.op
      when 'create' then 'dropp'
      when 'update' then 'revert'
      when 'remove' then 're-creat'
      end
    end
  ["#{stem}ing", "#{stem}ed"]
end
# Print the command and then execute it
def execute(command)
write "-- #{command.gsub(/(? command.version,
arel_table['type'] => command.type,
arel_table['object_name'] => command.object_name,
)
@pool.with_connection do |connection|
connection.insert(im, "#{name} Create", primary_key, command.version)
end
end
# Remove the registered version of the given command from the versions table
def delete_version(command)
  dm = ::Arel::DeleteManager.new(arel_table)
  dm.wheres = [arel_table[primary_key].eq(command.version.to_s)]
  @pool.with_connection do |connection|
    connection.delete(dm, "#{name} Destroy")
  end
end
# The migration version column doubles as this table's primary key
def primary_key = 'version'

# Identifier used to annotate the SQL queries executed by this class
def name = 'Torque::PostgreSQL::VersionedCommand'
# Compose the table name, honoring ActiveRecord's global prefix and suffix
def table_name
  prefix = ActiveRecord::Base.table_name_prefix
  suffix = ActiveRecord::Base.table_name_suffix
  "#{prefix}#{PostgreSQL.config.versioned_commands.table_name}#{suffix}"
end
# Create the versioned commands table, inheriting from the regular schema
# migrations table, unless it already exists
def create_table
  @pool.with_connection do |connection|
    return if connection.table_exists?(table_name)

    parent = @pool.schema_migration.table_name
    connection.create_table(table_name, inherits: parent) do |t|
      t.string :type, null: false, index: true
      t.string :object_name, null: false, index: true
    end
  end
end
# Drop the versioned commands table if it is still around
def drop_table
  @pool.with_connection { |conn| conn.drop_table(table_name, if_exists: true) }
end
# Count the registered command versions (0 when the table doesn't exist)
def count
  return 0 unless table_exists?

  sm = ::Arel::SelectManager.new(arel_table)
  # NOTE(review): FN.count is splatted — presumably it can return multiple
  # projections; confirm against the FN helper
  sm.project(*FN.count(::Arel.star))
  @pool.with_connection do |connection|
    connection.select_value(sm, "#{self.class} Count")
  end
end
# Whether the versioned commands table exists as a data source
def table_exists?
  @pool.with_connection do |connection|
    connection.data_source_exists?(table_name)
  end
end
# List [object_name, versions count] rows for the given command type,
# ordered by object name (empty when the table doesn't exist)
def versions_of(type)
  return [] unless table_exists?

  sm = ::Arel::SelectManager.new(arel_table)
  sm.project(arel_table['object_name'], FN.count(::Arel.star).as('version'))
  sm.where(arel_table['type'].eq(type.to_s))
  sm.group(arel_table['object_name'])
  sm.order(arel_table['object_name'].asc)
  @pool.with_connection do |connection|
    connection.select_rows(sm, "#{name} Load")
  end
end
end
end
end
end
================================================
FILE: lib/torque/postgresql/versioned_commands.rb
================================================
# frozen_string_literal: true
require_relative 'versioned_commands/command_migration'
require_relative 'versioned_commands/migration_context'
require_relative 'versioned_commands/migrator'
require_relative 'versioned_commands/schema_table'
module Torque
module PostgreSQL
# Takes advantage of Rails migrations to create other sorts of
# objects/commands that can also be versioned. Everything migrated will
# still live within Migrations borders (i.e., the schema_migrations), but
# the way they are handled and registered in the schema dumper is completely
# different
module VersionedCommands
# Truthy only when running inside a full Rails application (with paths)
RAILS_APP = defined?(Rails.application.paths)

# Matches an optionally quoted, optionally schema-qualified SQL identifier,
# capturing the full (dotted) name
NAME_MATCH = '"?((?:[_a-z0-9]+"?\."?)?[_a-z0-9]+)"?'
class << self
# Check whether the given command type is enabled in the configuration
def valid_type?(type)
  enabled_types = PostgreSQL.config.versioned_commands.types
  enabled_types.include?(type.to_sym)
end
# Run the internal validations for the given type and content, rejecting
# any type that is not currently enabled
def validate!(type, content, name)
  raise ArgumentError, "Unknown versioned command type: #{type}" unless valid_type?(type)

  send(:"validate_#{type}!", content, name)
end
# Get the content of the command based on the type, name, and version.
# Globs every provided directory and requires exactly one matching file
def fetch_command(dirs, type, name, version)
  paths = Array.wrap(dirs).map { |d| "#{d}/**/*_#{type}_#{name}_v#{version}.sql" }
  files = Dir[*paths]
  return File.read(files.first) if files.one?

  raise ArgumentError, <<~MSG.squish if files.none?
    No previous version found for #{type} #{name}
    of version v#{version}.
  MSG

  raise ArgumentError, <<~MSG.squish if files.many?
    Multiple files found for #{type} #{name}
    of version v#{version}.
  MSG
end
# The regexp is dynamic due to the list of available types.
# Captures: [1] migration version, [2] operation, [3] type,
# [4] object name, [5] object version, [6] optional extra qualifier
def filename_regexp
  @filename_regexp ||= begin
    types = PostgreSQL.config.versioned_commands.types
    Regexp.new([
      "\\A([0-9]+)_",
      "(create|update|remove)_",
      "(#{types.join('|')})_",
      "([_a-z0-9]*)",
      "_v([0-9]+)",
      "\\.?([_a-z0-9]*)?",
      "\\.sql\\z",
    ].join)
  end
end
private
# Validate that the content of the command is correct
# Validate that the content of the command is a proper function definition:
# exactly one function, created with 'OR REPLACE', whose name matches the
# file name (with any schema dot flattened to an underscore).
#
# Fix: guard against content with no CREATE FUNCTION at all — previously
# `names.first.downcase` raised NoMethodError on nil instead of a clear
# ArgumentError (validate_view! already guards this case).
def validate_function!(content, name)
  result = content.scan(Regexp.new([
    '^\s*CREATE\s+(OR\s+REPLACE)?\s*',
    "FUNCTION\\s+#{NAME_MATCH}",
  ].join, 'mi'))

  raise ArgumentError, <<~MSG.squish if result.empty?
    Missing or invalid function definition.
  MSG

  names = result.map(&:last).compact.uniq(&:downcase)
  raise ArgumentError, <<~MSG.squish if names.size > 1
    Multiple functions definition found.
  MSG

  raise ArgumentError, <<~MSG.squish unless result.all?(&:first)
    'OR REPLACE' is required for proper migration support.
  MSG

  # Flatten a schema-qualified name (schema.fn) into the file-name form.
  fn_name = names.first.downcase.sub('.', '_')
  raise ArgumentError, <<~MSG.squish if fn_name != name.downcase
    Function name must match file name.
  MSG
end
# Validate that the content of the command is correct
# Validate that the content of the command is a proper type definition:
# exactly one CREATE TYPE paired with exactly one DROP TYPE of the same
# name, which must also match the file name.
#
# Fix: guard against content with no CREATE TYPE — previously
# `creates.first.last` raised NoMethodError on nil instead of a clear
# ArgumentError (only the drops side was guarded).
def validate_type!(content, name)
  creates = content.scan(Regexp.new(['^\s*CREATE\s+TYPE\s+', NAME_MATCH].join, 'mi'))
  drops = content.scan(Regexp.new([
    '^\s*DROP\s+TYPE\s+(IF\s+EXISTS)?\s*',
    NAME_MATCH,
  ].join, 'mi'))

  raise ArgumentError, <<~MSG.squish if creates.empty?
    Missing or invalid type definition.
  MSG

  raise ArgumentError, <<~MSG.squish if creates.size > 1
    More than one type definition found.
  MSG

  raise ArgumentError, <<~MSG.squish if drops.size > 1
    More than one type drop found.
  MSG

  raise ArgumentError, <<~MSG.squish if drops.empty?
    'DROP TYPE' is required for proper migration support.
  MSG

  create_name = creates.first.last.downcase
  raise ArgumentError, <<~MSG.squish if drops.first.last.downcase != create_name
    Drop does not match create.
  MSG

  # Flatten a schema-qualified name (schema.type) into the file-name form.
  create_name = create_name.sub('.', '_')
  raise ArgumentError, <<~MSG.squish if create_name != name.downcase
    Type name must match file name.
  MSG
end
# Validate that the content of the command is correct
# Validate that the content of the command is a proper view definition:
# exactly one view; regular views require 'OR REPLACE', while materialized
# views forbid it and instead require a matching 'DROP MATERIALIZED VIEW IF
# EXISTS' statement. The view name must also match the file name.
def validate_view!(content, name)
  pattern = Regexp.new([
    '^\s*CREATE\s+(OR\s+REPLACE)?\s*',
    '((?:TEMP|TEMPORARY|MATERIALIZED)\s+)?',
    '(?:RECURSIVE\s+)?',
    "VIEW\\s+#{NAME_MATCH}",
  ].join, 'mi')

  matches = content.scan(pattern)
  raise ArgumentError, <<~MSG.squish if matches.empty?
    Missing or invalid view definition.
  MSG

  raise ArgumentError, <<~MSG.squish if matches.size > 1
    More than one view definition found.
  MSG

  replace_clause, modifier, found_name = matches.first
  if modifier&.strip == 'MATERIALIZED'
    # Materialized views cannot be replaced in place, so they must be
    # explicitly dropped before being recreated.
    raise ArgumentError, <<~MSG.squish if replace_clause.present?
      Materialized view does not support 'OR REPLACE'.
    MSG

    expected_drop = "DROP MATERIALIZED VIEW IF EXISTS #{found_name};"
    raise ArgumentError, <<~MSG.squish unless content.include?(expected_drop)
      'DROP MATERIALIZED VIEW IF EXISTS' is required for proper migration support.
    MSG
  elsif replace_clause.blank?
    raise ArgumentError, <<~MSG.squish
      'OR REPLACE' is required for proper migration support.
    MSG
  end

  # Flatten a schema-qualified name (schema.view) into the file-name form.
  normalized_name = found_name.downcase.sub('.', '_')
  raise ArgumentError, <<~MSG.squish if normalized_name != name.downcase
    View name must match file name.
  MSG
end
end
end
end
end
================================================
FILE: lib/torque/postgresql.rb
================================================
# External dependencies needed before any Torque code is loaded.
require 'i18n'
require 'ostruct'
require 'active_model'
require 'active_record'
require 'active_support'
require 'active_support/core_ext/date/acts_like'
require 'active_support/core_ext/time/zones'
require 'active_record/connection_adapters/postgresql_adapter'
# Torque components. Config and version come first so later files can read
# configuration at load time; the remaining order is significant (see the
# inline note below), so do not reorder these requires.
require 'torque/postgresql/config'
require 'torque/postgresql/version'
require 'torque/postgresql/collector'
require 'torque/postgresql/geometry_builder'
require 'torque/postgresql/predicate_builder'
require 'torque/postgresql/i18n'
require 'torque/postgresql/arel'
require 'torque/postgresql/adapter'
require 'torque/postgresql/associations'
require 'torque/postgresql/attributes'
require 'torque/postgresql/autosave_association'
require 'torque/postgresql/inheritance'
require 'torque/postgresql/base' # Needs to be after inheritance
require 'torque/postgresql/insert_all'
require 'torque/postgresql/migration'
require 'torque/postgresql/relation'
require 'torque/postgresql/reflection'
require 'torque/postgresql/schema_cache'
require 'torque/postgresql/table_name'
require 'torque/postgresql/function'
# Rails-specific wiring only happens inside a Rails app.
require 'torque/postgresql/railtie' if defined?(Rails)
================================================
FILE: lib/torque-postgresql.rb
================================================
# Entry point matching the gem name; delegates to the canonical require path.
require 'torque/postgresql'
================================================
FILE: spec/en.yml
================================================
en:
torque: 'Torque Rocks!'
activerecord:
attributes:
user:
role:
visitor: 'A simple Visitor'
role:
assistant: 'An Assistant'
enums:
content_status:
created: '1 - Created'
roles:
manager: 'The Manager'
enum:
content_status:
draft: 'Draft (2)'
published: 'Finally published'
admin: 'Super Duper Admin'
================================================
FILE: spec/factories/authors.rb
================================================
# Author with a random name and a random value from the specialties enum.
FactoryBot.define do
  factory :author do
    name { Faker::Name.name }
    specialty { Enum::Specialties.values.sample }
  end
end
================================================
FILE: spec/factories/comments.rb
================================================
# Comment with random content.
FactoryBot.define do
  factory :comment do
    content { Faker::Lorem.paragraph }
    # Child factory linking to a random existing comment; requires at least
    # one Comment to already be persisted.
    factory :comment_recursive do
      comment_id { Comment.order('RANDOM()').first.id }
    end
    # Trait assigning a random existing user; requires a persisted User.
    trait :random_user do
      user_id { User.order('RANDOM()').first.id }
    end
  end
end
================================================
FILE: spec/factories/item.rb
================================================
# Item with a random name.
FactoryBot.define do
  factory :item do
    name { Faker::Lorem.sentence }
  end
end
================================================
FILE: spec/factories/posts.rb
================================================
# Post with random title and content.
FactoryBot.define do
  factory :post do
    title { Faker::Lorem.sentence }
    content { Faker::Lorem.paragraph }
  end
end
================================================
FILE: spec/factories/tags.rb
================================================
# Tag with a random name.
FactoryBot.define do
  factory :tag do
    name { Faker::Lorem.sentence }
  end
end
================================================
FILE: spec/factories/texts.rb
================================================
# Text with random content.
FactoryBot.define do
  factory :text do
    content { Faker::Lorem.sentence }
  end
end
================================================
FILE: spec/factories/users.rb
================================================
# User with a random name and the default 'visitor' role.
FactoryBot.define do
  factory :user do
    name { Faker::Name.name }
    role { 'visitor' }
  end
end
================================================
FILE: spec/factories/videos.rb
================================================
# Video with a random title.
FactoryBot.define do
  factory :video do
    title { Faker::Lorem.sentence }
  end
end
================================================
FILE: spec/fixtures/migrations/20250101000001_create_users.rb
================================================
================================================
FILE: spec/fixtures/migrations/20250101000002_create_function_count_users_v1.sql
================================================
================================================
FILE: spec/fixtures/migrations/20250101000003_create_internal_users.rb
================================================
================================================
FILE: spec/fixtures/migrations/20250101000004_update_function_count_users_v2.sql
================================================
================================================
FILE: spec/fixtures/migrations/20250101000005_create_view_all_users_v1.sql
================================================
================================================
FILE: spec/fixtures/migrations/20250101000006_create_type_user_id_v1.sql
================================================
================================================
FILE: spec/fixtures/migrations/20250101000007_remove_function_count_users_v2.sql
================================================
================================================
FILE: spec/initialize.rb
================================================
# Manually load the optional pieces that the Railtie would normally require
# inside a Rails app, then perform the equivalent wiring by hand.
require_relative '../lib/torque/postgresql/auxiliary_statement'
require_relative '../lib/torque/postgresql/adapter/schema_overrides'
require_relative '../lib/torque/postgresql/adapter/oid/box'
require_relative '../lib/torque/postgresql/adapter/oid/circle'
require_relative '../lib/torque/postgresql/adapter/oid/enum'
require_relative '../lib/torque/postgresql/adapter/oid/enum_set'
require_relative '../lib/torque/postgresql/adapter/oid/interval'
require_relative '../lib/torque/postgresql/adapter/oid/line'
require_relative '../lib/torque/postgresql/adapter/oid/segment'
require_relative '../lib/torque/postgresql/attributes/enum'
require_relative '../lib/torque/postgresql/attributes/enum_set'
require_relative '../lib/torque/postgresql/attributes/period'
require_relative '../lib/torque/postgresql/attributes/full_text_search'
require_relative '../lib/torque/postgresql/relation/auxiliary_statement'
require_relative '../lib/torque/postgresql/relation/join_series'
require_relative '../lib/torque/postgresql/relation/buckets'
require_relative '../lib/torque/postgresql/versioned_commands'
module Torque
  module PostgreSQL
    ActiveRecord::Base.belongs_to_many_required_by_default = false
    # Expose the custom attribute features on every model.
    Attributes::Enum.include_on(ActiveRecord::Base)
    Attributes::EnumSet.include_on(ActiveRecord::Base)
    Attributes::Period.include_on(ActiveRecord::Base)
    Attributes::FullTextSearch.include_on(ActiveRecord::Base)
    # Mix the optional relation features into every relation.
    Relation.include(Relation::AuxiliaryStatement)
    Relation.include(Relation::JoinSeries)
    Relation.include(Relation::Buckets)
    config.versioned_commands.enabled = true
    ActiveRecord::Schema::Definition.include(Adapter::Definition)
    # Spec-friendly shorthands for the CTE classes.
    ::Object.const_set('TorqueCTE', AuxiliaryStatement)
    ::Object.const_set('TorqueRecursiveCTE', AuxiliaryStatement::Recursive)
    # Enum namespace: Enum::SomeName lazily resolves via Attributes::Enum.lookup.
    config.enum.namespace = ::Object.const_set('Enum', Module.new)
    config.enum.namespace.define_singleton_method(:const_missing) do |name|
      Attributes::Enum.lookup(name)
    end
    config.enum.namespace.define_singleton_method(:sample) do |name|
      Attributes::Enum.lookup(name).sample
    end
    # Register the custom OID types with Active Record's PostgreSQL adapter.
    ar_type = ActiveRecord::Type
    ar_type.register(:enum, Adapter::OID::Enum, adapter: :postgresql)
    ar_type.register(:enum_set, Adapter::OID::EnumSet, adapter: :postgresql)
    ar_type.register(:box, Adapter::OID::Box, adapter: :postgresql)
    ar_type.register(:circle, Adapter::OID::Circle, adapter: :postgresql)
    ar_type.register(:line, Adapter::OID::Line, adapter: :postgresql)
    ar_type.register(:segment, Adapter::OID::Segment, adapter: :postgresql)
    ar_type.register(:interval, Adapter::OID::Interval, adapter: :postgresql)
    # Build the configured Arel infix operators and preload additional types.
    Arel.build_operations(config.arel.infix_operators)
    ActiveRecord::Base.connection.torque_load_additional_types
  end
end
================================================
FILE: spec/mocks/cache_query.rb
================================================
module Mocks
  # Spec helpers to capture the SQL a block produces without asserting on
  # database results.
  module CacheQuery
    # Runs the block with the AR query cache force-enabled and returns the
    # first cached query key, removing it from the cache map afterwards.
    # NOTE(review): pokes the cache's private @enabled/@map ivars — may break
    # across Rails versions; confirm when upgrading.
    def get_last_executed_query(&block)
      cache = ActiveRecord::Base.connection.query_cache
      cache.instance_variable_set(:@enabled, true)
      map = cache.instance_variable_get(:@map)
      block.call
      result = map.keys.first
      cache.instance_variable_set(:@enabled, false)
      map.delete(result)
      result
    end

    # Temporarily overrides the connection's raw_execute to record the first
    # executed [sql, binds] pair (args.first and args.third), then restores
    # the original method and returns the captured pair.
    # NOTE(review): the inner block parameter shadows the outer `block`
    # argument; harmless here since the outer block is called afterwards.
    def get_query_with_binds(&block)
      result = nil
      original_method = ActiveRecord::Base.connection.method(:raw_execute)
      original_method.receiver.define_singleton_method(:raw_execute) do |*args, **xargs, &block|
        result ||= [args.first, args.third]
        super(*args, **xargs, &block)
      end
      block.call
      original_method.receiver.define_singleton_method(:raw_execute, &original_method.to_proc)
      result
    end
  end
end
================================================
FILE: spec/mocks/create_table.rb
================================================
module Mocks
  # Spec helper that prevents SQL execution during table-creation specs.
  module CreateTable
    # Wraps each example: stubs the connection's #log so statements return
    # their SQL string instead of executing, then restores the original
    # method once the example finishes.
    def mock_create_table
      around do |example|
        original_method = ActiveRecord::Base.connection.method(:log)
        original_method.receiver.define_singleton_method(:log) do |sql, *, **, &block|
          sql
        end
        example.run
        original_method.receiver.define_singleton_method(:log, &original_method.to_proc)
      end
    end
  end
end
================================================
FILE: spec/models/activity.rb
================================================
# Base activity record; optionally linked to an Author.
class Activity < ActiveRecord::Base
  belongs_to :author
end
================================================
FILE: spec/models/activity_book.rb
================================================
require_relative 'activity'

# Activity subtype with no extra behavior.
class ActivityBook < Activity
end
================================================
FILE: spec/models/activity_post/sample.rb
================================================
# Reopens ActivityPost to define its nested Sample subtype.
class ActivityPost < Activity
  class Sample < ActivityPost
  end
end
================================================
FILE: spec/models/activity_post.rb
================================================
require_relative 'activity'

# Activity subtype linked to a Post.
class ActivityPost < Activity
  belongs_to :post
end

# The nested Sample subtype must load after its parent is defined.
require_relative 'activity_post/sample'
================================================
FILE: spec/models/author.rb
================================================
# Author with activities and posts.
class Author < ActiveRecord::Base
  # NOTE(review): cast_records is a Torque scope — presumably casts rows to
  # their actual inherited model classes; confirm against the library docs.
  has_many :activities, -> { cast_records }
  has_many :posts
end
================================================
FILE: spec/models/author_journalist.rb
================================================
require_relative 'author'

# Author subtype with no extra behavior.
class AuthorJournalist < Author
end
================================================
FILE: spec/models/category.rb
================================================
# Plain model with no custom behavior.
# Fix: added the missing space before `<` in the inheritance declaration.
class Category < ActiveRecord::Base
end
================================================
FILE: spec/models/comment.rb
================================================
# Comment written by a User.
class Comment < ActiveRecord::Base
  belongs_to :user
end
================================================
FILE: spec/models/course.rb
================================================
# Plain model with no custom behavior.
class Course < ActiveRecord::Base
end
================================================
FILE: spec/models/geometry.rb
================================================
# Plain model with no custom behavior.
class Geometry < ActiveRecord::Base
end
================================================
FILE: spec/models/guest_comment.rb
================================================
require_relative 'comment'

# Comment subtype with no extra behavior.
class GuestComment < Comment
end
================================================
FILE: spec/models/internal/user.rb
================================================
module Internal
  # User stored in the custom "internal" PostgreSQL schema.
  class User < ActiveRecord::Base
    self.schema = 'internal'
  end
end
================================================
FILE: spec/models/item.rb
================================================
# Item connected to tags via Torque's belongs_to_many association.
class Item < ActiveRecord::Base
  belongs_to_many :tags
end
================================================
FILE: spec/models/post.rb
================================================
# Post belonging to an Author and an Activity.
class Post < ActiveRecord::Base
  belongs_to :author
  belongs_to :activity

  # Always-true scope used by specs.
  scope :test_scope, -> { where('1=1') }
end
================================================
FILE: spec/models/question.rb
================================================
# Question with implicit ordering by created_at.
class Question < ActiveRecord::Base
  self.implicit_order_column = 'created_at'
end
================================================
FILE: spec/models/question_select.rb
================================================
require_relative 'question'

# Question subtype with no extra behavior.
class QuestionSelect < Question
end
================================================
FILE: spec/models/tag.rb
================================================
# Plain model with no custom behavior.
class Tag < ActiveRecord::Base
end
================================================
FILE: spec/models/text.rb
================================================
# Plain model with no custom behavior.
class Text < ActiveRecord::Base
end
================================================
FILE: spec/models/time_keeper.rb
================================================
# Plain model with no custom behavior.
class TimeKeeper < ActiveRecord::Base
end
================================================
FILE: spec/models/user.rb
================================================
# User with comments and a :last_comment auxiliary statement (CTE).
class User < ActiveRecord::Base
  has_many :comments

  auxiliary_statement :last_comment do |cte|
    # DISTINCT ON (user_id) ordered by id DESC selects each user's newest comment.
    cte.query Comment.distinct_on(:user_id).order(:user_id, id: :desc)
    cte.attributes id: :comment_id, content: :comment_content
  end
end
================================================
FILE: spec/models/video.rb
================================================
# Plain model with no custom behavior.
class Video < ActiveRecord::Base
end
================================================
FILE: spec/schema.rb
================================================
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
version = 7
# Skip redefinition when the database already matches this schema version.
return if ActiveRecord::Migrator.current_version == version

ActiveRecord::Schema.define(version: version) do
  self.verbose = false

  # These are extensions that must be enabled in order to support this database
  enable_extension "pgcrypto"
  enable_extension "plpgsql"

  # Custom schemas used in this database.
  create_schema "internal", force: :cascade

  # Custom types defined in this database.
  # Note that some types may not work with other database engines. Be careful if changing database.
  create_enum "content_status", ["created", "draft", "published", "archived"]
  create_enum "specialties", ["books", "movies", "plays"]
  create_enum "roles", ["visitor", "assistant", "manager", "admin"]
  create_enum "conflicts", ["valid", "invalid", "untrusted"]
  create_enum "types", ["A", "B", "C", "D"]

  # Exercises the geometric column types handled by the adapter.
  create_table "geometries", force: :cascade do |t|
    t.point "point"
    t.line "line"
    t.lseg "lseg"
    t.box "box"
    t.path "closed_path"
    t.path "open_path"
    t.polygon "polygon"
    t.circle "circle"
  end

  # Exercises range and interval column types.
  create_table "time_keepers", force: :cascade do |t|
    t.daterange "available"
    t.tsrange "period"
    t.tstzrange "tzperiod"
    t.interval "th"
  end

  create_table "tags", force: :cascade do |t|
    t.string "name"
  end

  create_table "videos", force: :cascade do |t|
    t.bigint "tag_ids", array: true
    t.string "title"
    t.string "url"
    t.enum "type", enum_type: :types
    t.enum "conflicts", enum_type: :conflicts, array: true
    t.jsonb "metadata"
    # t.column "pieces", :int4multirange
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  create_table "authors", force: :cascade do |t|
    t.string "name"
    t.string "type"
    t.enum "specialty", enum_type: :specialties
  end

  create_table "categories", force: :cascade do |t|
    t.integer "parent_id"
    t.string "title"
  end

  create_table "texts", force: :cascade do |t|
    t.integer "user_id"
    t.string "content"
    t.enum "conflict", enum_type: :conflicts
  end

  create_table "comments", force: :cascade do |t|
    t.integer "user_id", null: false
    t.integer "comment_id"
    t.integer "video_id"
    t.text "content", null: false
    t.string "kind"
    t.index ["user_id"], name: "index_comments_on_user_id", using: :btree
    t.index ["comment_id"], name: "index_comments_on_comment_id", using: :btree
  end

  # search_language and search_vector are Torque's full-text search columns.
  create_table "courses", force: :cascade do |t|
    t.integer "category_id"
    t.string "title", null: false
    t.interval "duration"
    t.enum "types", enum_type: :types, array: true
    t.search_language "lang", null: false, default: 'english'
    t.search_vector "search_vector", columns: :title, language: :lang
    t.tsvector "unhandled"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  # Id-less table used as an extra parent for the inherited tables below.
  create_table "images", force: :cascade, id: false do |t|
    t.string "file"
  end

  create_table "posts", force: :cascade do |t|
    t.integer "author_id"
    t.integer "activity_id"
    t.string "title"
    t.text "content"
    t.enum "status", enum_type: :content_status
    t.search_vector "search_vector", columns: %i[title content]
    t.index ["author_id"], name: "index_posts_on_author_id", using: :btree
  end

  create_table "items", force: :cascade do |t|
    t.string "name"
    t.bigint "tag_ids", array: true, default: "{1}"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  create_table "users", force: :cascade do |t|
    t.string "name", null: false
    t.enum "role", enum_type: :roles, default: :visitor
    t.integer "age"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  # Second "users" table, isolated inside the custom "internal" schema.
  create_table "users", schema: "internal", force: :cascade do |t|
    t.string "email"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.index ["email"], name: "index_internal_users_on_email", unique: true
  end

  create_table "activities", force: :cascade do |t|
    t.integer "author_id"
    t.string "title"
    t.boolean "active"
    t.enum "kind", enum_type: :types
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  create_table "questions", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
    t.string "title"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  # PostgreSQL native table inheritance via Torque's inherits option.
  create_table "activity_books", force: :cascade, inherits: :activities do |t|
    t.text "description"
    t.string "url"
    t.boolean "activated"
  end

  create_table "activity_posts", force: :cascade, inherits: [:activities, :images] do |t|
    t.integer "post_id"
    t.string "url"
    t.integer "activated"
  end

  create_table "activity_post_samples", force: :cascade, inherits: :activity_posts

  create_table "question_selects", force: :cascade, inherits: :questions do |t|
    t.string "options", array: true
  end

  # create_table "activity_blanks", force: :cascade, inherits: :activities
  # create_table "activity_images", force: :cascade, inherits: [:activities, :images]

  add_foreign_key "posts", "authors"
end

# Drop cached metadata so the freshly (re)defined schema is re-read.
ActiveRecord::Base.connection.schema_cache.clear!
================================================
FILE: spec/spec_helper.rb
================================================
require 'torque-postgresql'
require 'database_cleaner'
require 'factory_bot'
require 'dotenv'
require 'faker'
require 'rspec'

# Optional debugger; specs still run without it.
begin
  require 'debug/prelude'
rescue LoadError
  # No debugger available, skip
end

Dotenv.load

# DATABASE_URL takes precedence over the hard-coded CI defaults.
ActiveRecord::Base.establish_connection(ENV['DATABASE_URL'] || {
  adapter: 'postgresql',
  username: 'travis',
  port: 5433,
})

# Resets the inheritance-tracking state held on the schema cache so each
# example starts from a clean slate.
cache = ActiveRecord::Base.connection.schema_cache
cleaner = ->() do
  cache.instance_variable_set(:@inheritance_loaded, false)
  cache.instance_variable_set(:@inheritance_dependencies, {})
  cache.instance_variable_set(:@inheritance_associations, {})
end

# Load all the files that are optional and managed by Railtie
require_relative 'initialize'

# This needs to come after loading all optional features
require_relative 'schema'

# Requires every model, factory, and mock (path stripped of 'spec/' and '.rb').
Dir.glob(File.join('spec', '{models,factories,mocks}', '**', '*.rb')) do |file|
  require file[5..-4]
end

cleaner.call
I18n.load_path << Pathname.pwd.join('spec', 'en.yml')

RSpec.configure do |config|
  config.extend Mocks::CreateTable
  config.include Mocks::CacheQuery
  config.include FactoryBot::Syntax::Methods

  config.formatter = :documentation
  config.color = true
  config.tty = true

  # Handles actions before RSpec initializes
  config.before(:suite) do
    Torque::PostgreSQL.config.schemas.whitelist << 'internal'
    ActiveSupport::Deprecation.try(:silenced=, true)
    DatabaseCleaner.clean_with(:truncation)
  end

  config.before(:each) do
    DatabaseCleaner.strategy = :transaction
  end

  config.before(:each) do
    DatabaseCleaner.start
  end

  config.after(:each) do
    DatabaseCleaner.clean
  end

  config.before(:each) do
    cleaner.call
  end
end
================================================
FILE: spec/tests/arel_spec.rb
================================================
require 'spec_helper'
RSpec.describe 'Arel' do
context 'on inflix operation' do
let(:collector) { ::Arel::Collectors::SQLString }
let(:attribute) { ::Arel::Table.new('a')['sample'] }
let(:conn) { ActiveRecord::Base.connection }
let(:visitor) { ::Arel::Visitors::PostgreSQL.new(conn) }
[
[:overlaps, [1, 2], "ARRAY[1, 2]"],
[:contains, [3, 4], "ARRAY[3, 4]"],
[:contained_by, [5, 6], "ARRAY[5, 6]"],
[:has_key, ::Arel.sql("'a'"), "'a'"],
[:has_all_keys, ['b', 'c'], "ARRAY['b', 'c']"],
[:has_any_keys, ['d', 'e'], "ARRAY['d', 'e']"],
[:strictly_left, ::Arel.sql('numrange(1, 2)'), 'numrange(1, 2)'],
[:strictly_right, ::Arel.sql('numrange(3, 4)'), 'numrange(3, 4)'],
[:doesnt_right_extend, ::Arel.sql('numrange(5, 6)'), 'numrange(5, 6)'],
[:doesnt_left_extend, ::Arel.sql('numrange(7, 8)'), 'numrange(7, 8)'],
[:adjacent_to, ::Arel.sql('numrange(9, 0)'), 'numrange(9, 0)'],
].each do |(operation, value, quoted_value)|
klass_name = operation.to_s.camelize
context "##{operation}" do
let(:operator) { instance.operator }
let(:instance) do
attribute.public_send(operation, value.is_a?(Array) ? ::Arel.array(value) : value)
end
context 'for attribute' do
let(:klass) { ::Arel::Nodes.const_get(klass_name) }
it "returns a new #{klass_name}" do
expect(instance).to be_a(klass)
end
end
context 'for visitor' do
let(:result) { visitor.accept(instance, collector.new).value }
it 'returns a formatted operation' do
expect(result).to be_eql("\"a\".\"sample\" #{operator} #{quoted_value}")
end
end
end
end
end
context 'on default value' do
let(:connection) { ActiveRecord::Base.connection }
after { Author.reset_column_information }
it 'does not break the change column default value method' do
connection.add_column(:authors, :enabled, :boolean)
expect { connection.change_column_default(:authors, :enabled, { from: nil, to: true }) }.not_to raise_error
expect(Author.columns_hash['enabled'].default).to eq('true')
end
it 'does not break jsonb' do
expect { connection.add_column(:authors, :profile, :jsonb, default: []) }.not_to raise_error
expect(Author.columns_hash['profile'].default).to eq('[]')
condition = Author.arel_table['profile'].is_distinct_from([])
expect(Author.where(condition).to_sql).to eq(<<~SQL.squish)
SELECT "authors".* FROM "authors" WHERE "authors"."profile" IS DISTINCT FROM '[]'
SQL
end
it 'works properly when column is an array' do
expect { connection.add_column(:authors, :tag_ids, :bigint, array: true, default: []) }.not_to raise_error
expect(Author.new.tag_ids).to eq([])
end
it 'works with an array with enum values for a new enum' do
value = ['a', 'b']
expect do
connection.create_enum(:samples, %i[a b c d])
connection.add_column(:authors, :samples, :enum, enum_type: :samples, array: true, default: value)
end.not_to raise_error
expect(Author.new.samples).to eq(value)
end
it 'works with an array with enum values for an existing enum' do
value = ['visitor', 'assistant']
expect { connection.add_column(:authors, :roles, :enum, enum_type: :roles, array: true, default: value) }.not_to raise_error
expect(Author.new.roles).to eq(value)
end
it 'works with multi dimentional array' do
value = [['1', '2'], ['3', '4']]
expect { connection.add_column(:authors, :tag_ids, :string, array: true, default: value) }.not_to raise_error
expect(Author.new.tag_ids).to eq(value)
end
it 'works with change column default value' do
value = ['2', '3']
connection.add_column(:authors, :tag_ids, :string, array: true)
expect { connection.change_column_default(:authors, :tag_ids, { from: nil, to: value }) }.not_to raise_error
expect(Author.new.tag_ids).to eq(value)
end
end
context 'on cast' do
it 'provides an array method' do
sample1 = ::Arel.array(1, 2, 3, 4)
sample2 = ::Arel.array([1, 2, 3, 4])
sample3 = ::Arel.array(1, 2, 3, 4, cast: 'bigint')
sample4 = ::Arel.array([1, 2, 3, 4], [5, 6, 7, 8], cast: 'integer')
expect(sample1.to_sql).to be_eql('ARRAY[1, 2, 3, 4]')
expect(sample2.to_sql).to be_eql('ARRAY[1, 2, 3, 4]')
expect(sample3.to_sql).to be_eql('ARRAY[1, 2, 3, 4]::bigint[]')
expect(sample4.to_sql).to be_eql('ARRAY[ARRAY[1, 2, 3, 4], ARRAY[5, 6, 7, 8]]::integer[]')
end
it 'provides a cast method' do
attribute = ::Arel::Table.new('a')['sample']
quoted = ::Arel::Nodes::build_quoted([1])
casted = ::Arel::Nodes::build_quoted(1, attribute)
expect(attribute.pg_cast('text').to_sql).to be_eql('"a"."sample"::text')
expect(quoted.pg_cast('bigint', true).to_sql).to be_eql('ARRAY[1]::bigint[]')
expect(casted.pg_cast('string').to_sql).to be_eql("1::string")
end
it 'provides proper support to cast methods' do
attribute = ::Arel::Table.new('a')['sample']
quoted = ::Arel::Nodes::build_quoted([1])
casted = ::Arel::Nodes::build_quoted(1)
expect(attribute.cast('text').to_sql).to be_eql('"a"."sample"::text')
expect(quoted.cast('bigint', true).to_sql).to be_eql('ARRAY[1]::bigint[]')
changed_result = ActiveRecord.gem_version >= Gem::Version.new('8.0.2')
changed_result = changed_result ? 'CAST(1 AS string)' : '1::string'
expect(casted.pg_cast('string').to_sql).to be_eql("1::string")
end
it 'properly works combined on a query' do
condition = Video.arel_table[:tag_ids].contains([1,2]).cast(:bigint, :array)
query = Video.all.where(condition).to_sql
expect(query).to include('WHERE "videos"."tag_ids" @> ARRAY[1, 2]::bigint[]')
condition = QuestionSelect.arel_table[:options].overlaps(%w[a b]).cast(:string, :array)
query = QuestionSelect.all.where(condition).to_sql
expect(query).to include('"options" && ARRAY[\'a\', \'b\']::string[]')
end
end
end
================================================
FILE: spec/tests/auxiliary_statement_spec.rb
================================================
require 'spec_helper'
RSpec.describe 'AuxiliaryStatement' do
before :each do
User.auxiliary_statements_list = {}
end
context 'on relation' do
let(:klass) { User }
let(:true_value) { 'TRUE' }
subject { klass.unscoped }
it 'has its method' do
expect(subject).to respond_to(:with)
end
it 'can perform simple queries' do
klass.send(:auxiliary_statement, :comments) do |cte|
cte.query Comment.all
cte.attributes content: :comment_content
end
result = 'WITH "comments" AS'
result << ' (SELECT "comments"."content" AS comment_content, "comments"."user_id" FROM "comments")'
result << ' SELECT "users".*, "comments"."comment_content" FROM "users"'
result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
expect(subject.with(:comments).arel.to_sql).to eql(result)
end
it 'can perform more complex queries' do
klass.send(:auxiliary_statement, :comments) do |cte|
cte.query Comment.distinct_on(:user_id).order(:user_id, id: :desc)
cte.attributes content: :last_comment
end
result = 'WITH "comments" AS (SELECT DISTINCT ON ( "comments"."user_id" )'
result << ' "comments"."content" AS last_comment, "comments"."user_id"'
result << ' FROM "comments" ORDER BY "comments"."user_id" ASC,'
result << ' "comments"."id" DESC) SELECT "users".*,'
result << ' "comments"."last_comment" FROM "users" INNER JOIN "comments"'
result << ' ON "comments"."user_id" = "users"."id"'
expect(subject.with(:comments).arel.to_sql).to eql(result)
end
it 'accepts extra select columns' do
klass.send(:auxiliary_statement, :comments) do |cte|
cte.query Comment.all
cte.attributes content: :comment_content
end
result = 'WITH "comments" AS'
result << ' (SELECT "comments"."content" AS comment_content, "comments"."slug" AS comment_slug, "comments"."user_id" FROM "comments")'
result << ' SELECT "users".*, "comments"."comment_content", "comments"."comment_slug" FROM "users"'
result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
expect(subject.with(:comments, select: {slug: :comment_slug}).arel.to_sql).to eql(result)
end
it 'accepts extra join columns' do
klass.send(:auxiliary_statement, :comments) do |cte|
cte.query Comment.all
cte.attributes content: :comment_content
end
result = 'WITH "comments" AS'
result << ' (SELECT "comments"."content" AS comment_content, "comments"."user_id", "comments"."active" FROM "comments")'
result << ' SELECT "users".*, "comments"."comment_content" FROM "users"'
result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id" AND "comments"."active" = "users"."active"'
expect(subject.with(:comments, join: {active: :active}).arel.to_sql).to eql(result)
end
# The `where:` option injects extra conditions into the CTE body; the value is
# rendered as a bind parameter ($1), not inlined.
it 'accepts extra conditions' do
  klass.send(:auxiliary_statement, :comments) do |cte|
    cte.query Comment.all
    cte.attributes content: :comment_content
  end
  result = 'WITH "comments" AS'
  result << ' (SELECT "comments"."content" AS comment_content, "comments"."user_id"'
  result << ' FROM "comments" WHERE "comments"."active" = $1)'
  result << ' SELECT "users".*, "comments"."comment_content" FROM "users"'
  result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
  expect(subject.with(:comments, where: {active: true}).arel.to_sql).to eql(result)
end
# Conditions on the CTE query and on the outer relation coexist, and the bind
# parameters keep CTE-first ordering ($1 from the CTE, $2 from the relation).
it 'accepts scopes from both sides' do
  klass.send(:auxiliary_statement, :comments) do |cte|
    cte.query Comment.where(id: 1).all
    cte.attributes content: :comment_content
  end
  query = subject.where(id: 2).with(:comments)
  result = 'WITH "comments" AS'
  result << ' (SELECT "comments"."content" AS comment_content, "comments"."user_id" FROM "comments"'
  result << ' WHERE "comments"."id" = $1)'
  result << ' SELECT "users".*, "comments"."comment_content" FROM "users"'
  result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
  result << ' WHERE "users"."id" = $2'
  expect(query.arel.to_sql).to eql(result)
  expect(query.send(:bound_attributes).map(&:value_before_type_cast)).to eql([1, 2])
end
# A raw SQL snippet (built via the `sql` helper) can be used as the attribute
# expression; it is emitted verbatim and aliased like any other attribute.
it 'accepts string as attributes' do
  klass.send(:auxiliary_statement, :comments) do |cte|
    cte.query Comment.all
    cte.attributes sql('MAX(id)') => :comment_id
  end
  # Same expected SQL as before, assembled from fragments instead of `<<`.
  expected_sql = [
    'WITH "comments" AS',
    ' (SELECT MAX(id) AS comment_id, "comments"."user_id" FROM "comments")',
    ' SELECT "users".*, "comments"."comment_id" FROM "users"',
    ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"',
  ].join
  expect(subject.with(:comments).arel.to_sql).to eql(expected_sql)
end
# A more elaborate raw snippet (window function) also passes through verbatim.
it 'accepts complex string as attributes' do
  klass.send(:auxiliary_statement, :comments) do |cte|
    cte.query Comment.all
    cte.attributes sql('ROW_NUMBER() OVER (PARTITION BY ORDER BY "comments"."id")') => :comment_id
  end
  result = 'WITH "comments" AS'
  result << ' (SELECT ROW_NUMBER() OVER (PARTITION BY ORDER BY "comments"."id") AS comment_id, "comments"."user_id" FROM "comments")'
  result << ' SELECT "users".*, "comments"."comment_id" FROM "users"'
  result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
  expect(subject.with(:comments).arel.to_sql).to eql(result)
end
# An Arel attribute (built via the `col` helper) is rendered through Arel —
# here `col(:id).minimum` becomes MIN("comments"."id") — and aliased.
it 'accepts arel attribute as attributes' do
  klass.send(:auxiliary_statement, :comments) do |cte|
    cte.query Comment.all
    cte.attributes col(:id).minimum => :comment_id
  end
  result = 'WITH "comments" AS'
  result << ' (SELECT MIN("comments"."id") AS comment_id, "comments"."user_id" FROM "comments")'
  result << ' SELECT "users".*, "comments"."comment_id" FROM "users"'
  result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
  expect(subject.with(:comments).arel.to_sql).to eql(result)
end
# `cte.join` accepts both symbol keys (base-table column) and table-qualified
# string keys ('a.col'); values name the CTE-side column of each ON condition.
it 'accepts custom join properties' do
  klass.send(:auxiliary_statement, :comments) do |cte|
    cte.query Comment.all
    cte.attributes content: :comment_content
    cte.join name: :id, 'a.col' => :col
  end
  result = 'WITH "comments" AS (SELECT "comments"."content" AS comment_content,'
  result << ' "comments"."id", "comments"."col" FROM "comments") SELECT "users".*,'
  result << ' "comments"."comment_content" FROM "users" INNER JOIN "comments"'
  result << ' ON "comments"."id" = "users"."name" AND "comments"."col" = "a"."col"'
  expect(subject.with(:comments).arel.to_sql).to eql(result)
end
# `cte.join_type :left` switches the generated join to LEFT OUTER JOIN.
it 'can perform other types of joins' do
  klass.send(:auxiliary_statement, :comments) do |cte|
    cte.query Comment.all
    cte.attributes content: :comment_content
    cte.join_type :left
  end
  result = 'WITH "comments" AS (SELECT "comments"."content" AS comment_content,'
  result << ' "comments"."user_id" FROM "comments") SELECT "users".*,'
  result << ' "comments"."comment_content" FROM "users" LEFT OUTER JOIN "comments"'
  result << ' ON "comments"."user_id" = "users"."id"'
  expect(subject.with(:comments).arel.to_sql).to eql(result)
end
# `cte.through` points at a named association; its foreign key (a_user_id)
# drives the join instead of the default one.
it 'can manually define the association' do
  klass.has_many :sample_comment, class_name: 'Comment', foreign_key: :a_user_id
  klass.send(:auxiliary_statement, :comments) do |cte|
    cte.query Comment.all
    cte.through :sample_comment
    cte.attributes content: :sample_content
  end
  result = 'WITH "comments" AS'
  result << ' (SELECT "comments"."content" AS sample_content, "comments"."a_user_id" FROM "comments")'
  result << ' SELECT "users".*, "comments"."sample_content" FROM "users"'
  result << ' INNER JOIN "comments" ON "comments"."a_user_id" = "users"."id"'
  expect(subject.with(:comments).arel.to_sql).to eql(result)
end
# A statement pulled in via `requires` keeps its own scope/binds; ordering is
# dependency first ($1), requested statement ($2), then the outer scope ($3).
it 'accepts complex scopes from dependencies' do
  klass.send(:auxiliary_statement, :comments1) do |cte|
    cte.query Comment.where(id: 1).all
    cte.attributes content: :comment_content1
  end
  klass.send(:auxiliary_statement, :comments2) do |cte|
    cte.requires :comments1
    cte.query Comment.where(id: 2).all
    cte.attributes content: :comment_content2
  end
  query = subject.where(id: 3).with(:comments2)
  result = 'WITH '
  result << '"comments1" AS (SELECT "comments"."content" AS comment_content1, "comments"."user_id" FROM "comments" WHERE "comments"."id" = $1), '
  result << '"comments2" AS (SELECT "comments"."content" AS comment_content2, "comments"."user_id" FROM "comments" WHERE "comments"."id" = $2)'
  result << ' SELECT "users".*, "comments1"."comment_content1", "comments2"."comment_content2" FROM "users"'
  result << ' INNER JOIN "comments1" ON "comments1"."user_id" = "users"."id"'
  result << ' INNER JOIN "comments2" ON "comments2"."user_id" = "users"."id"'
  result << ' WHERE "users"."id" = $3'
  expect(query.arel.to_sql).to eql(result)
  expect(query.send(:bound_attributes).map(&:value_before_type_cast)).to eql([1, 2, 3])
end
# Auxiliary statements may declare dependencies on one another via `requires`.
context 'with dependency' do
  before :each do
    # comments2 requires comments1, so using comments2 must also inject comments1.
    klass.send(:auxiliary_statement, :comments1) do |cte|
      cte.query Comment.all
      cte.attributes content: :comment_content1
    end
    klass.send(:auxiliary_statement, :comments2) do |cte|
      cte.requires :comments1
      cte.query Comment.all
      cte.attributes content: :comment_content2
    end
  end
  # Description grammar fixed ("can requires" -> "can require").
  it 'can require another statement as dependency' do
    result = 'WITH '
    result << '"comments1" AS (SELECT "comments"."content" AS comment_content1, "comments"."user_id" FROM "comments"), '
    result << '"comments2" AS (SELECT "comments"."content" AS comment_content2, "comments"."user_id" FROM "comments")'
    result << ' SELECT "users".*, "comments1"."comment_content1", "comments2"."comment_content2" FROM "users"'
    result << ' INNER JOIN "comments1" ON "comments1"."user_id" = "users"."id"'
    result << ' INNER JOIN "comments2" ON "comments2"."user_id" = "users"."id"'
    expect(subject.with(:comments2).arel.to_sql).to eql(result)
  end
  # Description grammar fixed ("can uses already already set dependent").
  # Explicitly listing the dependency must not duplicate it in the WITH clause.
  it 'can use an already set dependent' do
    result = 'WITH '
    result << '"comments1" AS (SELECT "comments"."content" AS comment_content1, "comments"."user_id" FROM "comments"), '
    result << '"comments2" AS (SELECT "comments"."content" AS comment_content2, "comments"."user_id" FROM "comments")'
    result << ' SELECT "users".*, "comments1"."comment_content1", "comments2"."comment_content2" FROM "users"'
    result << ' INNER JOIN "comments1" ON "comments1"."user_id" = "users"."id"'
    result << ' INNER JOIN "comments2" ON "comments2"."user_id" = "users"."id"'
    expect(subject.with(:comments1, :comments2).arel.to_sql).to eql(result)
  end
  it 'raises an error if the dependent does not exist' do
    # Redefines comments2 with a dependency on an undefined statement.
    klass.send(:auxiliary_statement, :comments2) do |cte|
      cte.requires :comments3
      cte.query Comment.all
      cte.attributes content: :comment_content2
    end
    expect{ subject.with(:comments2).arel.to_sql }.to raise_error(ArgumentError)
  end
end
# Auxiliary statements whose query is given as a raw SQL string.
context 'query as string' do
  it 'performs correctly' do
    klass.send(:auxiliary_statement, :comments) do |cte|
      cte.query :comments, 'SELECT * FROM comments'
      cte.attributes content: :comment
      cte.join id: :user_id
    end
    result = 'WITH "comments" AS (SELECT * FROM comments)'
    result << ' SELECT "users".*, "comments"."comment" FROM "users"'
    result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
    expect(subject.with(:comments).arel.to_sql).to eql(result)
  end
  # %{placeholders} in the string are filled from the `args:` option.
  it 'accepts arguments to format the query' do
    klass.send(:auxiliary_statement, :comments) do |cte|
      cte.query :comments, 'SELECT * FROM comments WHERE active = %{active}'
      cte.attributes content: :comment
      cte.join id: :user_id
    end
    result = "WITH \"comments\" AS (SELECT * FROM comments WHERE active = #{true_value})"
    result << ' SELECT "users".*, "comments"."comment" FROM "users"'
    result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
    expect(subject.with(:comments, args: {active: true}).arel.to_sql).to eql(result)
  end
  # String queries cannot infer join columns, so `join` must be given.
  it 'raises an error when join columns are not given' do
    klass.send(:auxiliary_statement, :comments) do |cte|
      cte.query :comments, 'SELECT * FROM comments'
      cte.attributes content: :comment
    end
    expect{ subject.with(:comments).arel.to_sql }.to raise_error(ArgumentError, /join columns/)
  end
  # Description grammar fixed ("not raises an error" -> "does not raise an error").
  it 'does not raise an error when not given the table name as first argument' do
    klass.send(:auxiliary_statement, :comments) do |cte|
      cte.query 'SELECT * FROM comments'
      cte.attributes content: :comment
      cte.join id: :user_id
    end
    expect{ subject.with(:comments).arel.to_sql }.not_to raise_error
  end
end
# Auxiliary statements whose query is given as a proc (or any callable).
context 'query as proc' do
  it 'performs correctly for result as relation' do
    klass.send(:auxiliary_statement, :comments) do |cte|
      cte.query :comments, -> { Comment.all }
      cte.attributes content: :comment
      cte.join id: :user_id
    end
    result = 'WITH "comments" AS'
    result << ' (SELECT "comments"."content" AS comment, "comments"."user_id" FROM "comments")'
    result << ' SELECT "users".*, "comments"."comment" FROM "users"'
    result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
    expect(subject.with(:comments).arel.to_sql).to eql(result)
  end
  # Any object responding to #call (with #arity) works, not just Proc.
  it 'performs correctly for anything that has a call method' do
    obj = Struct.new(:call, :arity).new('SELECT * FROM comments', 0)
    klass.send(:auxiliary_statement, :comments) do |cte|
      cte.query :comments, obj
      cte.attributes content: :comment
      cte.join id: :user_id
    end
    result = 'WITH "comments" AS (SELECT * FROM comments)'
    result << ' SELECT "users".*, "comments"."comment" FROM "users"'
    result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
    expect(subject.with(:comments).arel.to_sql).to eql(result)
  end
  it 'performs correctly for result as string' do
    klass.send(:auxiliary_statement, :comments) do |cte|
      cte.query :comments, -> { 'SELECT * FROM comments' }
      cte.attributes content: :comment
      cte.join id: :user_id
    end
    result = 'WITH "comments" AS (SELECT * FROM comments)'
    result << ' SELECT "users".*, "comments"."comment" FROM "users"'
    result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
    expect(subject.with(:comments).arel.to_sql).to eql(result)
  end
  # A proc with arity > 0 receives the `args:` option as an object.
  it 'performs correctly when the proc requires arguments' do
    klass.send(:auxiliary_statement, :comments) do |cte|
      cte.query :comments, -> (args) { Comment.where(id: args.id) }
      cte.attributes content: :comment
      cte.join id: :user_id
    end
    query = subject.with(:comments, args: {id: 1})
    result = 'WITH "comments" AS'
    result << ' (SELECT "comments"."content" AS comment, "comments"."user_id"'
    result << ' FROM "comments" WHERE "comments"."id" = $1)'
    result << ' SELECT "users".*, "comments"."comment" FROM "users"'
    result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
    expect(query.arel.to_sql).to eql(result)
    expect(query.send(:bound_attributes).map(&:value_before_type_cast)).to eql([1])
  end
  it 'raises an error when join columns are not given' do
    klass.send(:auxiliary_statement, :comments) do |cte|
      cte.query :comments, -> { Author.all }
      cte.attributes content: :comment
    end
    expect{ subject.with(:comments).arel.to_sql }.to raise_error(ArgumentError, /join columns/)
  end
  # Description grammar fixed ("not raises an error" -> "does not raise an error").
  it 'does not raise an error when not given the table name as first argument' do
    klass.send(:auxiliary_statement, :comments) do |cte|
      cte.query -> { Comment.all }
      cte.attributes content: :comment
      cte.join id: :user_id
    end
    expect{ subject.with(:comments).arel.to_sql }.not_to raise_error
  end
  it 'raises an error when the result of the proc is an invalid type' do
    klass.send(:auxiliary_statement, :comments) do |cte|
      cte.query :comments, -> { false }
      cte.attributes content: :comment
      cte.join id: :user_id
    end
    expect{ subject.with(:comments).arel.to_sql }.to raise_error(ArgumentError, /query objects/)
  end
end
# Auxiliary statements are inherited down the model hierarchy and can be
# overridden by subclasses.
context 'with inheritance' do
  let(:base) { Activity }
  let(:klass) { ActivityBook }
  # A statement defined on the ancestor is usable from the subclass relation.
  it 'accepts ancestors auxiliary statements' do
    base.send(:auxiliary_statement, :authors) do |cte|
      cte.query Author.all
      cte.attributes name: :author_name
      cte.join author_id: :id
    end
    result = 'WITH "authors" AS'
    result << ' (SELECT "authors"."name" AS author_name, "authors"."id" FROM "authors")'
    result << ' SELECT "activity_books".*, "authors"."author_name" FROM "activity_books"'
    result << ' INNER JOIN "authors" ON "authors"."id" = "activity_books"."author_id"'
    expect(subject.with(:authors).arel.to_sql).to eql(result)
  end
  # A subclass definition with the same name shadows the ancestor's.
  it 'can replace ancestors auxiliary statements' do
    base.send(:auxiliary_statement, :authors) do |cte|
      cte.query Author.all
      cte.attributes name: :author_name
      cte.join author_id: :id
    end
    klass.send(:auxiliary_statement, :authors) do |cte|
      cte.query Author.all
      cte.attributes type: :author_type
      cte.join author_id: :id
    end
    result = 'WITH "authors" AS'
    result << ' (SELECT "authors"."type" AS author_type, "authors"."id" FROM "authors")'
    result << ' SELECT "activity_books".*, "authors"."author_type" FROM "activity_books"'
    result << ' INNER JOIN "authors" ON "authors"."id" = "activity_books"."author_id"'
    expect(subject.with(:authors).arel.to_sql).to eql(result)
  end
  it 'raises an error when no class has the auxiliary statement' do
    expect{ subject.with(:comments).arel.to_sql }.to raise_error(ArgumentError)
  end
end
context 'recursive' do
let(:klass) { Course }
# Canonical WITH RECURSIVE shape: a seed SELECT filtered by parent_id IS NULL,
# UNION, and a recursive member joining the CTE on parent_id = id (connect
# inferred from the primary key).
it 'correctly build a recursive cte' do
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query Category.all
    cte.join id: :parent_id
  end
  result = 'WITH RECURSIVE "all_categories" AS ('
  result << ' SELECT "categories"."id", "categories"."parent_id"'
  result << ' FROM "categories"'
  result << ' WHERE "categories"."parent_id" IS NULL'
  result << ' UNION'
  result << ' SELECT "categories"."id", "categories"."parent_id"'
  result << ' FROM "categories", "all_categories"'
  result << ' WHERE "categories"."parent_id" = "all_categories"."id"'
  result << ' ) SELECT "courses".* FROM "courses" INNER JOIN "all_categories"'
  result << ' ON "all_categories"."parent_id" = "courses"."id"'
  expect(subject.with(:all_categories).arel.to_sql).to eql(result)
end
# A single-value connect replaces the pair with name / parent_name.
it 'allows connect to be set to something different using a single value' do
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query Category.all
    cte.join id: :parent_id
    cte.connect :name
  end
  result = 'WITH RECURSIVE "all_categories" AS ('
  result << ' SELECT "categories"."name", "categories"."parent_id"'
  result << ' FROM "categories"'
  result << ' WHERE "categories"."parent_name" IS NULL'
  result << ' UNION'
  result << ' SELECT "categories"."name", "categories"."parent_id"'
  result << ' FROM "categories", "all_categories"'
  result << ' WHERE "categories"."parent_name" = "all_categories"."name"'
  result << ' ) SELECT "courses".* FROM "courses" INNER JOIN "all_categories"'
  result << ' ON "all_categories"."parent_id" = "courses"."id"'
  expect(subject.with(:all_categories).arel.to_sql).to eql(result)
end
# A hash connect fully overrides both sides of the recursion condition.
it 'allows a complete different set of connect' do
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query Category.all
    cte.join id: :parent_id
    cte.connect left: :right
  end
  result = 'WITH RECURSIVE "all_categories" AS ('
  result << ' SELECT "categories"."left", "categories"."parent_id"'
  result << ' FROM "categories"'
  result << ' WHERE "categories"."right" IS NULL'
  result << ' UNION'
  result << ' SELECT "categories"."left", "categories"."parent_id"'
  result << ' FROM "categories", "all_categories"'
  result << ' WHERE "categories"."right" = "all_categories"."left"'
  result << ' ) SELECT "courses".* FROM "courses" INNER JOIN "all_categories"'
  result << ' ON "all_categories"."parent_id" = "courses"."id"'
  expect(subject.with(:all_categories).arel.to_sql).to eql(result)
end
# `union_all!` switches the set operator from UNION to UNION ALL.
it 'allows using an union all' do
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query Category.all
    cte.join id: :parent_id
    cte.union_all!
  end
  result = 'WITH RECURSIVE "all_categories" AS ('
  result << ' SELECT "categories"."id", "categories"."parent_id"'
  result << ' FROM "categories"'
  result << ' WHERE "categories"."parent_id" IS NULL'
  result << ' UNION ALL'
  result << ' SELECT "categories"."id", "categories"."parent_id"'
  result << ' FROM "categories", "all_categories"'
  result << ' WHERE "categories"."parent_id" = "all_categories"."id"'
  result << ' ) SELECT "courses".* FROM "courses" INNER JOIN "all_categories"'
  result << ' ON "all_categories"."parent_id" = "courses"."id"'
  expect(subject.with(:all_categories).arel.to_sql).to eql(result)
end
# A scoped query replaces the default "parent IS NULL" seed condition.
it 'allows having a complete different initiator' do
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query Category.where(parent_id: 5)
    cte.join id: :parent_id
  end
  result = 'WITH RECURSIVE "all_categories" AS ('
  result << ' SELECT "categories"."id", "categories"."parent_id"'
  result << ' FROM "categories"'
  result << ' WHERE "categories"."parent_id" = $1'
  result << ' UNION'
  result << ' SELECT "categories"."id", "categories"."parent_id"'
  result << ' FROM "categories", "all_categories"'
  result << ' WHERE "categories"."parent_id" = "all_categories"."id"'
  result << ' ) SELECT "courses".* FROM "courses" INNER JOIN "all_categories"'
  result << ' ON "all_categories"."parent_id" = "courses"."id"'
  expect(subject.with(:all_categories).arel.to_sql).to eql(result)
end
# `with_depth` adds a depth counter: 0 in the seed, incremented per level.
it 'can process the depth of the query' do
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query Category.all
    cte.join id: :parent_id
    cte.with_depth
  end
  result = 'WITH RECURSIVE "all_categories" AS ('
  result << ' SELECT "categories"."id", "categories"."parent_id", 0 AS depth'
  result << ' FROM "categories"'
  result << ' WHERE "categories"."parent_id" IS NULL'
  result << ' UNION'
  result << ' SELECT "categories"."id", "categories"."parent_id", ("all_categories"."depth" + 1) AS depth'
  result << ' FROM "categories", "all_categories"'
  result << ' WHERE "categories"."parent_id" = "all_categories"."id"'
  result << ' ) SELECT "courses".* FROM "courses" INNER JOIN "all_categories"'
  result << ' ON "all_categories"."parent_id" = "courses"."id"'
  expect(subject.with(:all_categories).arel.to_sql).to eql(result)
end
# `with_depth` options: custom column name, custom start value, and `as:` to
# expose the column in the outer SELECT.
it 'can process and expose the depth of the query' do
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query Category.all
    cte.join id: :parent_id
    cte.with_depth 'd', start: 10, as: :category_depth
  end
  result = 'WITH RECURSIVE "all_categories" AS ('
  result << ' SELECT "categories"."id", "categories"."parent_id", 10 AS d'
  result << ' FROM "categories"'
  result << ' WHERE "categories"."parent_id" IS NULL'
  result << ' UNION'
  result << ' SELECT "categories"."id", "categories"."parent_id", ("all_categories"."d" + 1) AS d'
  result << ' FROM "categories", "all_categories"'
  result << ' WHERE "categories"."parent_id" = "all_categories"."id"'
  result << ' ) SELECT "courses".*, "all_categories"."d" AS category_depth FROM "courses" INNER JOIN "all_categories"'
  result << ' ON "all_categories"."parent_id" = "courses"."id"'
  expect(subject.with(:all_categories).arel.to_sql).to eql(result)
end
# `with_path` accumulates the visited ids as a varchar[] via ARRAY_APPEND.
it 'can process the path of the query' do
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query Category.all
    cte.join id: :parent_id
    cte.with_path
  end
  result = 'WITH RECURSIVE "all_categories" AS ('
  result << ' SELECT "categories"."id", "categories"."parent_id", ARRAY["categories"."id"]::varchar[] AS path'
  result << ' FROM "categories"'
  result << ' WHERE "categories"."parent_id" IS NULL'
  result << ' UNION'
  result << ' SELECT "categories"."id", "categories"."parent_id", ARRAY_APPEND("all_categories"."path", "categories"."id"::varchar) AS path'
  result << ' FROM "categories", "all_categories"'
  result << ' WHERE "categories"."parent_id" = "all_categories"."id"'
  result << ' ) SELECT "courses".* FROM "courses" INNER JOIN "all_categories"'
  result << ' ON "all_categories"."parent_id" = "courses"."id"'
  expect(subject.with(:all_categories).arel.to_sql).to eql(result)
end
# `with_path` options: custom column name, a different source column, and
# `as:` to expose the path in the outer SELECT.
it 'can process and expose the path of the query' do
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query Category.all
    cte.join id: :parent_id
    cte.with_path 'p', source: :name, as: :category_path
  end
  result = 'WITH RECURSIVE "all_categories" AS ('
  result << ' SELECT "categories"."id", "categories"."parent_id", ARRAY["categories"."name"]::varchar[] AS p'
  result << ' FROM "categories"'
  result << ' WHERE "categories"."parent_id" IS NULL'
  result << ' UNION'
  result << ' SELECT "categories"."id", "categories"."parent_id", ARRAY_APPEND("all_categories"."p", "categories"."name"::varchar) AS p'
  result << ' FROM "categories", "all_categories"'
  result << ' WHERE "categories"."parent_id" = "all_categories"."id"'
  result << ' ) SELECT "courses".*, "all_categories"."p" AS category_path FROM "courses" INNER JOIN "all_categories"'
  result << ' ON "all_categories"."parent_id" = "courses"."id"'
  expect(subject.with(:all_categories).arel.to_sql).to eql(result)
end
# String queries require an explicit `sub_query`; both are emitted verbatim.
it 'works with string queries' do
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query 'SELECT * FROM categories WHERE a IS NULL'
    cte.sub_query 'SELECT * FROM categories, all_categories WHERE all_categories.a = b'
    cte.join id: :parent_id
  end
  result = 'WITH RECURSIVE "all_categories" AS ('
  result << 'SELECT * FROM categories WHERE a IS NULL'
  result << ' UNION '
  result << ' SELECT * FROM categories, all_categories WHERE all_categories.a = b'
  result << ') SELECT "courses".* FROM "courses" INNER JOIN "all_categories"'
  result << ' ON "all_categories"."parent_id" = "courses"."id"'
  expect(subject.with(:all_categories).arel.to_sql).to eql(result)
end
# A string query carries no relation to derive the recursive member from, so
# the sub query must be supplied explicitly.
it 'raises an error when query is a string and there is no sub query' do
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query 'SELECT * FROM categories WHERE a IS NULL'
    cte.join id: :parent_id
  end
  expect{ subject.with(:all_categories).arel.to_sql }.to raise_error(ArgumentError, /generate sub query/)
end
# A sub query proc returning a non-query value (here an Integer) is rejected.
it 'raises an error when sub query has an invalid type' do
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query 'SELECT * FROM categories WHERE a IS NULL'
    cte.sub_query -> { 1 }
    cte.join id: :parent_id
  end
  expect{ subject.with(:all_categories).arel.to_sql }.to raise_error(ArgumentError, /query and sub query objects/)
end
# Description fixed: the test stubs primary_key to nil, so connect CANNOT be
# resolved automatically — the original wording said "can".
it 'raises an error when connect cannot be resolved automatically' do
  allow(klass).to receive(:primary_key).and_return(nil)
  klass.send(:recursive_auxiliary_statement, :all_categories) do |cte|
    cte.query Category.all
    cte.join id: :parent_id
  end
  expect{ subject.with(:all_categories).arel.to_sql }.to raise_error(ArgumentError, /setting up a proper way to connect/)
end
end
# Aggregate calculations must not leak the CTE's exposed attributes into the
# outer SELECT list; these specs check the SQL actually executed.
it 'works with count and does not add extra columns' do
  klass.send(:auxiliary_statement, :comments) do |cte|
    cte.query Comment.all
    cte.attributes content: :comment_content
  end
  result = 'WITH "comments" AS'
  result << ' (SELECT "comments"."content" AS comment_content, "comments"."user_id" FROM "comments")'
  result << ' SELECT COUNT(*) FROM "users"'
  result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
  query = get_last_executed_query { subject.with(:comments).count }
  expect(query).to eql(result)
end
# `sum(comments: :value)` targets a CTE-exposed column by table and alias.
it 'works with sum and does not add extra columns' do
  klass.send(:auxiliary_statement, :comments) do |cte|
    cte.query Comment.all
    cte.attributes id: :value
  end
  result = 'WITH "comments" AS'
  result << ' (SELECT "comments"."id" AS value, "comments"."user_id" FROM "comments")'
  result << ' SELECT SUM("comments"."value") FROM "users"'
  result << ' INNER JOIN "comments" ON "comments"."user_id" = "users"."id"'
  query = get_last_executed_query { subject.with(:comments).sum(comments: :value) }
  expect(query).to eql(result)
end
# Error cases for misconfigured or missing statements.
it 'raises an error when using an invalid type of object as query' do
  klass.send(:auxiliary_statement, :comments) do |cte|
    # A bare class is neither a relation, a string, nor a callable.
    cte.query :string, String
  end
  expect{ subject.with(:comments).arel.to_sql }.to raise_error(ArgumentError, /object types/)
end
it 'raises an error when trying to use a statement that is not defined' do
  expect{ subject.with(:does_not_exist).arel.to_sql }.to raise_error(ArgumentError)
end
it 'raises an error when using an invalid type of join' do
  klass.send(:auxiliary_statement, :comments) do |cte|
    cte.query Comment.all
    cte.attributes content: :comment_content
    cte.join_type :invalid
  end
  expect{ subject.with(:comments).arel.to_sql }.to raise_error(ArgumentError)
end
end
# Class-level API exposed on models: the configurator macros and `with`.
context 'on model' do
  subject { User }
  it 'has its configurator' do
    expect(subject.protected_methods).to include(:cte)
    expect(subject.protected_methods).to include(:auxiliary_statement)
  end
  it 'has the recursive configuration' do
    expect(subject.protected_methods).to include(:recursive_cte)
    expect(subject.protected_methods).to include(:recursive_auxiliary_statement)
  end
  # Each configured statement registers itself and defines a namespaced class.
  it 'allows configure new auxiliary statements' do
    subject.send(:auxiliary_statement, :cte1)
    expect(subject.auxiliary_statements_list).to include(:cte1)
    expect(subject.const_defined?('Cte1_AuxiliaryStatement')).to be_truthy
  end
  it 'has its query method' do
    expect(subject).to respond_to(:with)
  end
  it 'returns a relation when using the method' do
    subject.send(:auxiliary_statement, :comments) do |cte|
      cte.query Comment.all
      cte.attributes content: :comment_content
    end
    expect(subject.with(:comments)).to be_a(ActiveRecord::Relation)
  end
end
# Statements created externally via AuxiliaryStatement.create and passed
# directly to `with`, instead of being configured on the model.
context 'on external' do
  let(:klass) { Torque::PostgreSQL::AuxiliaryStatement }
  subject { User }
  it 'has the external method available' do
    expect(klass).to respond_to(:create)
  end
  # CTE name defaults to the singular model name ("comment").
  it 'accepts simple auxiliary statement definition' do
    sample = klass.create(Comment.all)
    query = subject.with(sample, select: {content: :comment_content}).arel.to_sql
    result = 'WITH "comment" AS'
    result << ' (SELECT "comments"."content" AS comment_content, "comments"."user_id" FROM "comments")'
    result << ' SELECT "users".*, "comment"."comment_content" FROM "users"'
    result << ' INNER JOIN "comment" ON "comment"."user_id" = "users"."id"'
    expect(query).to eql(result)
  end
  it 'accepts a hash auxiliary statement definition' do
    sample = klass.create(query: Comment.all, select: {content: :comment_content})
    query = subject.with(sample).arel.to_sql
    result = 'WITH "comment" AS'
    result << ' (SELECT "comments"."content" AS comment_content, "comments"."user_id" FROM "comments")'
    result << ' SELECT "users".*, "comment"."comment_content" FROM "users"'
    result << ' INNER JOIN "comment" ON "comment"."user_id" = "users"."id"'
    expect(query).to eql(result)
  end
  # An explicit name plus a configuration block works like the model macro.
  it 'accepts a block when creating the auxiliary statement' do
    sample = klass.create(:all_comments) do |cte|
      cte.query Comment.all
      cte.select content: :comment_content
    end
    result = 'WITH "all_comments" AS'
    result << ' (SELECT "comments"."content" AS comment_content, "comments"."user_id" FROM "comments")'
    result << ' SELECT "users".*, "all_comments"."comment_content" FROM "users"'
    result << ' INNER JOIN "all_comments" ON "all_comments"."user_id" = "users"."id"'
    query = subject.with(sample).arel.to_sql
    expect(query).to eql(result)
  end
  # Externally created recursive statements, configured via `with` options.
  context 'recursive' do
    let(:klass) { Torque::PostgreSQL::AuxiliaryStatement::Recursive }
    subject { Course }
    it 'has the external method available' do
      expect(klass).to respond_to(:create)
    end
    it 'accepts simple recursive auxiliary statement definition' do
      settings = { join: { id: :parent_id } }
      query = subject.with(klass.create(Category.all), **settings).arel.to_sql
      result = 'WITH RECURSIVE "category" AS ('
      result << ' SELECT "categories"."id", "categories"."parent_id"'
      result << ' FROM "categories"'
      result << ' WHERE "categories"."parent_id" IS NULL'
      result << ' UNION'
      result << ' SELECT "categories"."id", "categories"."parent_id"'
      result << ' FROM "categories", "category"'
      result << ' WHERE "categories"."parent_id" = "category"."id"'
      result << ' ) SELECT "courses".* FROM "courses" INNER JOIN "category"'
      result << ' ON "category"."parent_id" = "courses"."id"'
      expect(query).to eql(result)
    end
    it 'accepts a connect option' do
      settings = { join: { id: :parent_id }, connect: { a: :b } }
      query = subject.with(klass.create(Category.all), **settings).arel.to_sql
      result = 'WITH RECURSIVE "category" AS ('
      result << ' SELECT "categories"."a", "categories"."parent_id"'
      result << ' FROM "categories"'
      result << ' WHERE "categories"."b" IS NULL'
      result << ' UNION'
      result << ' SELECT "categories"."a", "categories"."parent_id"'
      result << ' FROM "categories", "category"'
      result << ' WHERE "categories"."b" = "category"."a"'
      result << ' ) SELECT "courses".* FROM "courses" INNER JOIN "category"'
      result << ' ON "category"."parent_id" = "courses"."id"'
      expect(query).to eql(result)
    end
    it 'accepts an union all option' do
      settings = { join: { id: :parent_id }, union_all: true }
      query = subject.with(klass.create(Category.all), **settings).arel.to_sql
      result = 'WITH RECURSIVE "category" AS ('
      result << ' SELECT "categories"."id", "categories"."parent_id"'
      result << ' FROM "categories"'
      result << ' WHERE "categories"."parent_id" IS NULL'
      result << ' UNION ALL'
      result << ' SELECT "categories"."id", "categories"."parent_id"'
      result << ' FROM "categories", "category"'
      result << ' WHERE "categories"."parent_id" = "category"."id"'
      result << ' ) SELECT "courses".* FROM "courses" INNER JOIN "category"'
      result << ' ON "category"."parent_id" = "courses"."id"'
      expect(query).to eql(result)
    end
    # A relation sub query replaces the default recursive member conditions.
    it 'accepts a sub query option' do
      settings = { join: { id: :parent_id }, sub_query: Category.where(active: true) }
      query = subject.with(klass.create(Category.all), **settings).arel.to_sql
      result = 'WITH RECURSIVE "category" AS ('
      result << ' SELECT "categories"."id", "categories"."parent_id" FROM "categories"'
      result << ' UNION'
      result << ' SELECT "categories"."id", "categories"."parent_id" FROM "categories", "category" WHERE "categories"."active" = $1'
      result << ' ) SELECT "courses".* FROM "courses" INNER JOIN "category"'
      result << ' ON "category"."parent_id" = "courses"."id"'
      expect(query).to eql(result)
    end
    it 'accepts a depth option' do
      settings = { join: { id: :parent_id }, with_depth: { name: 'a', start: 5, as: 'b' } }
      query = subject.with(klass.create(Category.all), **settings).arel.to_sql
      result = 'WITH RECURSIVE "category" AS ('
      result << ' SELECT "categories"."id", "categories"."parent_id", 5 AS a'
      result << ' FROM "categories"'
      result << ' WHERE "categories"."parent_id" IS NULL'
      result << ' UNION'
      result << ' SELECT "categories"."id", "categories"."parent_id", ("category"."a" + 1) AS a'
      result << ' FROM "categories", "category"'
      result << ' WHERE "categories"."parent_id" = "category"."id"'
      result << ' ) SELECT "courses".*, "category"."a" AS b FROM "courses" INNER JOIN "category"'
      result << ' ON "category"."parent_id" = "courses"."id"'
      expect(query).to eql(result)
    end
    it 'accepts a path option' do
      settings = { join: { id: :parent_id }, with_path: { name: 'a', source: 'b', as: 'c' } }
      query = subject.with(klass.create(Category.all), **settings).arel.to_sql
      result = 'WITH RECURSIVE "category" AS ('
      result << ' SELECT "categories"."id", "categories"."parent_id", ARRAY["categories"."b"]::varchar[] AS a'
      result << ' FROM "categories"'
      result << ' WHERE "categories"."parent_id" IS NULL'
      result << ' UNION'
      result << ' SELECT "categories"."id", "categories"."parent_id", ARRAY_APPEND("category"."a", "categories"."b"::varchar) AS a'
      result << ' FROM "categories", "category"'
      result << ' WHERE "categories"."parent_id" = "category"."id"'
      result << ' ) SELECT "courses".*, "category"."a" AS c FROM "courses" INNER JOIN "category"'
      result << ' ON "category"."parent_id" = "courses"."id"'
      expect(query).to eql(result)
    end
  end
end
# Direct tests of the Settings object handed to the configuration block.
context 'on settings' do
  let(:base) { User }
  let(:statement_klass) do
    base.send(:auxiliary_statement, :statement)
    base::Statement_AuxiliaryStatement
  end
  subject do
    Torque::PostgreSQL::AuxiliaryStatement::Settings.new(base, statement_klass)
  end
  it 'has access to base' do
    expect(subject.base).to eql(User)
    expect(subject.base_table).to be_a(Arel::Table)
  end
  it 'has access to statement table' do
    expect(subject.table_name).to eql('statement')
    expect(subject.table).to be_a(Arel::Table)
  end
  it 'has access to the query arel table' do
    subject.query Comment.all
    expect(subject.query_table).to be_a(Arel::Table)
  end
  it 'raises an error when trying to access query table before defining the query' do
    # NOTE(review): `subject` here is a Settings instance, so this likely
    # raises NoMethodError (a StandardError) on `with` rather than exercising
    # query-table access; presumably `subject.query_table` was intended — verify.
    expect{ subject.with(:comments).arel.to_sql }.to raise_error(StandardError)
  end
end
end
================================================
FILE: spec/tests/belongs_to_many_spec.rb
================================================
require 'spec_helper'
RSpec.describe 'BelongsToMany' do
# Macro-level behavior: how belongs_to_many is registered on a model class
# and which builder/reflection classes it routes through.
context 'on model' do
  let(:model) { Video }
  let(:key) { :tests }
  let(:builder) { Torque::PostgreSQL::Associations::Builder::BelongsToMany }
  let(:reflection) { Torque::PostgreSQL::Reflection::BelongsToManyReflection }
  # Drop reflections added during each example so they do not leak
  after { model._reflections = {} }
  it 'has the builder method' do
    expect(model).to respond_to(:belongs_to_many)
  end
  it 'triggers the correct builder and relation' do
    # Stub the builder but still produce a real reflection for the macro
    expect(builder).to receive(:build).with(anything, :tests, nil, {}) do |_, name, _, _|
      ActiveRecord::Reflection.create(:belongs_to_many, name, nil, {}, model)
    end
    expect(reflection).to receive(:new).with(:tests, nil, {}, model)
    model.belongs_to_many(:tests)
  end
  it 'allows setting up foreign key and primary_key as symbol' do
    model.belongs_to_many(:tests, foreign_key: :test_ids, primary_key: :test_id)
    reflection = model._reflections[key]
    expect(reflection.foreign_key).to be_eql('test_ids')
    expect(reflection.active_record_primary_key).to be_eql('test_id')
  end
end
context 'on association' do
# Shared setup: a Video owner carrying a belongs_to_many :tags association.
let(:other) { Tag }
let(:key) { :tags }
let(:initial) { FactoryBot.create(:tag) }
before { Video.belongs_to_many(:tags) }
subject { Video.create(title: 'A') }
# Reset callbacks and reflections so the association defined above does
# not leak into other example groups.
after do
  Video.reset_callbacks(:save)
  Video._reflections = {}
end
# Reader, loading, and finder behavior of the association proxy.
it 'has the method' do
  expect(subject).to respond_to(:tags)
  expect(subject._reflections).to include(key)
end
it 'has correct foreign key' do
  item = subject._reflections[key]
  expect(item.foreign_key).to be_eql('tag_ids')
end
it 'loads associated records' do
  subject.update(tag_ids: [initial.id])
  # A single id produces an equality predicate rather than an IN clause
  expect(subject.tags.to_sql).to be_eql(<<-SQL.squish)
SELECT "tags".* FROM "tags" WHERE "tags"."id" = #{initial.id}
SQL
  expect(subject.tags.load).to be_a(ActiveRecord::Associations::CollectionProxy)
  expect(subject.tags.to_a).to be_eql([initial])
end
it 'can be marked as loaded' do
  expect(subject.tags.loaded?).to be_eql(false)
  expect(subject.tags).to respond_to(:load_target)
  expect(subject.tags.load_target).to be_eql([])
  expect(subject.tags.loaded?).to be_eql(true)
end
it 'can find specific records' do
  records = FactoryBot.create_list(:tag, 10)
  subject.update(tag_ids: records.map(&:id))
  ids = records.map(&:id).sample(5)
  expect(subject.tags).to respond_to(:find)
  records = subject.tags.find(*ids)
  expect(records.size).to be_eql(5)
  expect(records.map(&:id).sort).to be_eql(ids.sort)
end
it 'can return last n records' do
  records = FactoryBot.create_list(:tag, 10)
  subject.update(tag_ids: records.map(&:id))
  ids = records.map(&:id).last(5)
  expect(subject.tags).to respond_to(:last)
  records = subject.tags.last(5)
  expect(records.size).to be_eql(5)
  expect(records.map(&:id).sort).to be_eql(ids.sort)
end
it 'can return first n records' do
  records = FactoryBot.create_list(:tag, 10)
  subject.update(tag_ids: records.map(&:id))
  ids = records.map(&:id).first(5)
  expect(subject.tags).to respond_to(:take)
  records = subject.tags.take(5)
  expect(records.size).to be_eql(5)
  expect(records.map(&:id).sort).to be_eql(ids.sort)
end
it 'can create the owner record with direct set items' do
  # Having another association would break this test due to how
  # +@new_record_before_save+ is set on autosave association
  Video.has_many(:comments)
  record = Video.create(title: 'A', tags: [initial])
  record.reload
  expect(record.tags.size).to be_eql(1)
  expect(record.tags.first.id).to be_eql(initial.id)
end
# Interaction between the association and dirty tracking / callbacks.
it 'can keep record changes accordingly' do
  expect(subject.tags.count).to be_eql(0)
  local_previous_changes = nil
  local_saved_changes = nil
  # Capture the change sets exactly as seen inside the commit callback
  Video.after_commit do
    local_previous_changes = self.previous_changes.dup
    local_saved_changes = self.saved_changes.dup
  end
  subject.update(title: 'B')
  expect(local_previous_changes).to include('title')
  expect(local_saved_changes).to include('title')
  subject.tags = FactoryBot.create_list(:tag, 5)
  subject.update(title: 'C', url: 'X')
  subject.reload
  expect(local_previous_changes).to include('title', 'url')
  expect(local_saved_changes).to include('title', 'url')
  # Assigning tags must not pollute the owner's reported column changes
  expect(local_previous_changes).not_to include('tag_ids')
  expect(local_saved_changes).not_to include('tag_ids')
  expect(subject.tag_ids.size).to be_eql(5)
  expect(subject.tags.count).to be_eql(5)
end
it 'can assign the record ids during before callback' do
  Video.before_save { self.tags = FactoryBot.create_list(:tag, 5) }
  record = Video.create(title: 'A')
  expect(Tag.count).to be_eql(5)
  expect(record.tag_ids.size).to be_eql(5)
  expect(record.tags.count).to be_eql(5)
end
it 'does not trigger after commit on the associated record' do
  called = false
  tag = FactoryBot.create(:tag)
  Tag.after_commit { called = true }
  expect(called).to be_falsey
  # Appending only touches the owner's tag_ids column, not the tag row
  subject.tags << tag
  expect(subject.tag_ids).to be_eql([tag.id])
  expect(called).to be_falsey
  Tag.reset_callbacks(:commit)
end
# Collection mutation: build, create, concat, replace, and deletion.
it 'can build an associated record' do
  record = subject.tags.build(name: 'Test')
  expect(record).to be_a(other)
  expect(record).not_to be_persisted
  expect(record.name).to be_eql('Test')
  expect(subject.tags.target).to be_eql([record])
  # Saving the owner persists the built record and stores its id
  expect(subject.save && subject.reload).to be_truthy
  expect(subject.tag_ids).to be_eql([record.id])
  expect(subject.tags.size).to be_eql(1)
end
it 'can create an associated record' do
  record = subject.tags.create(name: 'Test')
  expect(subject.tags).to respond_to(:create!)
  expect(record).to be_a(other)
  expect(record).to be_persisted
  expect(record.name).to be_eql('Test')
  expect(subject.tag_ids).to be_eql([record.id])
end
it 'can concat records' do
  record = FactoryBot.create(:tag)
  subject.update(tag_ids: [record.id])
  expect(subject.tags.size).to be_eql(1)
  subject.tags.concat(other.new(name: 'Test'))
  subject.reload
  expect(subject.tags.size).to be_eql(2)
  expect(subject.tag_ids.size).to be_eql(2)
  expect(subject.tags.last.name).to be_eql('Test')
end
it 'can replace records' do
  subject.tags << FactoryBot.create(:tag)
  expect(subject.tags.size).to be_eql(1)
  subject.tags = [other.new(name: 'Test 1')]
  subject.reload
  expect(subject.tags.size).to be_eql(1)
  expect(subject.tags[0].name).to be_eql('Test 1')
  subject.tags.replace([other.new(name: 'Test 2'), other.new(name: 'Test 3')])
  subject.reload
  expect(subject.tags.size).to be_eql(2)
  expect(subject.tags[0].name).to be_eql('Test 2')
  expect(subject.tags[1].name).to be_eql('Test 3')
end
it 'can delete specific records' do
  subject.tags << initial
  expect(subject.tags.size).to be_eql(1)
  subject.tags.delete(initial)
  expect(subject.tags.size).to be_eql(0)
  expect(subject.reload.tags.size).to be_eql(0)
end
it 'can delete all records' do
  subject.tags.concat(FactoryBot.create_list(:tag, 5))
  expect(subject.tags.size).to be_eql(5)
  subject.tags.delete_all
  expect(subject.tags.size).to be_eql(0)
end
it 'can destroy all records' do
  subject.tags.concat(FactoryBot.create_list(:tag, 5))
  expect(subject.tags.size).to be_eql(5)
  subject.tags.destroy_all
  expect(subject.tags.size).to be_eql(0)
end
# Clearing the backing array column and aggregate helpers on the proxy.
it 'can clear the array' do
  record = Video.create(title: 'B', tags: [initial])
  expect(record.tags.size).to be_eql(1)
  record.update(tag_ids: [])
  record.reload
  # An emptied list is stored as NULL, not as an empty array
  expect(record.tag_ids).to be_nil
  expect(record.tags.size).to be_eql(0)
end
it 'can have sum operations' do
  records = FactoryBot.create_list(:tag, 5)
  subject.tags.concat(records)
  result = records.map(&:id).reduce(:+)
  expect(subject.tags).to respond_to(:sum)
  expect(subject.tags.sum(:id)).to be_eql(result)
end
it 'can have a pluck operation' do
  records = FactoryBot.create_list(:tag, 5)
  subject.tags.concat(records)
  result = records.map(&:name).sort
  expect(subject.tags).to respond_to(:pluck)
  expect(subject.tags.pluck(:name).sort).to be_eql(result)
end
# Fixed typo in the example description: "markes" -> "marked".
# Checks the proxy's empty? predicate before and after appending a record.
it 'can be marked as empty' do
  expect(subject.tags).to respond_to(:empty?)
  expect(subject.tags.empty?).to be_truthy
  subject.tags << FactoryBot.create(:tag)
  expect(subject.tags.empty?).to be_falsey
end
# Membership checks, appending, reloading, preloading, and joining.
it 'can check if a record is included on the list' do
  outside = FactoryBot.create(:tag)
  inside = FactoryBot.create(:tag)
  expect(subject.tags).not_to be_include(inside)
  expect(subject.tags).not_to be_include(outside)
  subject.tags << inside
  expect(subject.tags).to respond_to(:include?)
  expect(subject.tags).to be_include(inside)
  expect(subject.tags).not_to be_include(outside)
end
it 'can append records' do
  subject.tags << other.new(name: 'Test 1')
  expect(subject.tags.size).to be_eql(1)
  subject.tags << other.new(name: 'Test 2')
  subject.update(title: 'B')
  subject.reload
  expect(subject.tags.size).to be_eql(2)
  expect(subject.tags.last.name).to be_eql('Test 2')
end
it 'can clear records' do
  subject.tags << FactoryBot.create(:tag)
  expect(subject.tags.size).to be_eql(1)
  subject.tags.clear
  expect(subject.tags.size).to be_eql(0)
end
it 'can reload records' do
  expect(subject.tags.size).to be_eql(0)
  new_tag = FactoryBot.create(:tag)
  subject.tags << new_tag
  subject.tags.reload
  expect(subject.tags.size).to be_eql(1)
  expect(subject.tags.first.id).to be_eql(new_tag.id)
  record = Video.create(title: 'B', tags: [new_tag])
  record.reload
  expect(record.tags.size).to be_eql(1)
  expect(record.tags.first.id).to be_eql(new_tag.id)
end
it 'can preload records' do
  records = FactoryBot.create_list(:tag, 5)
  subject.tags.concat(records)
  entries = Video.all.includes(:tags).load
  expect(entries.size).to be_eql(1)
  expect(entries.first.tags).to be_loaded
  expect(entries.first.tags.size).to be_eql(5)
end
it 'can preload records using ActiveRecord::Associations::Preloader' do
  records = FactoryBot.create_list(:tag, 5)
  subject.tags.concat(records)
  entries = Video.all
  # available_records lets the preloader match against pre-fetched rows
  arguments = { records: entries, associations: :tags, available_records: Tag.all.to_a }
  ActiveRecord::Associations::Preloader.new(**arguments).call
  entries = entries.load
  expect(entries.size).to be_eql(1)
  expect(entries.first.tags).to be_loaded
  expect(entries.first.tags.size).to be_eql(5)
end
it 'can joins records' do
  query = Video.all.joins(:tags)
  expect(query.to_sql).to match(/INNER JOIN "tags"/)
  expect { query.load }.not_to raise_error
end
# Bind-parameter usage for the IN clause of the association query.
context 'when handling binds' do
  let(:tag_ids) { FactoryBot.create_list(:tag, 5).map(&:id) }
  let!(:record) { Video.new(tag_ids: tag_ids) }
  it 'uses rails default with in and several binds' do
    sql, binds = get_query_with_binds { record.tags.load }
    expect(sql).to include(' WHERE "tags"."id" IN ($1, $2, $3, $4, $5)')
    expect(binds.size).to be_eql(5)
  end
end
# Column defaults on the foreign-key array must survive clears/reloads.
context 'when the attribute has a default value' do
  subject { FactoryBot.create(:item) }
  it 'will always return the column default value' do
    expect(subject.tag_ids).to be_a(Array)
    expect(subject.tag_ids).to be_eql([1])
  end
  it 'will keep the value as an array even when the association is cleared' do
    records = FactoryBot.create_list(:tag, 5)
    subject.tags.concat(records)
    subject.reload
    expect(subject.tag_ids).to be_a(Array)
    expect(subject.tag_ids).not_to be_eql([1, *records.map(&:id)])
    subject.tags.clear
    subject.reload
    expect(subject.tag_ids).to be_a(Array)
    expect(subject.tag_ids).to be_eql([1])
  end
end
# Unsaved owners can still read records assigned in memory.
context 'when record is not persisted' do
  let(:initial) { FactoryBot.create(:tag) }
  subject { Video.new(title: 'A', tags: [initial]) }
  it 'loads associated records' do
    expect(subject.tags.load).to be_a(ActiveRecord::Associations::CollectionProxy)
    expect(subject.tags.to_a).to be_eql([initial])
  end
end
end
# Same association wired over uuid primary keys, built on throwaway
# anonymous AR classes against ad-hoc tables.
context 'using uuid' do
  let(:connection) { ActiveRecord::Base.connection }
  let(:game) { Class.new(ActiveRecord::Base) }
  let(:player) { Class.new(ActiveRecord::Base) }
  let(:other) { player.create }
  # TODO: Set as a shared example
  before do
    connection.create_table(:players, id: :uuid) { |t| t.string :name }
    connection.create_table(:games, id: :uuid) { |t| t.uuid :player_ids, array: true }
    game.table_name = 'games'
    player.table_name = 'players'
    game.belongs_to_many :players, anonymous_class: player,
      inverse_of: false, foreign_key: :player_ids
  end
  subject { game.create }
  it 'loads one associated records' do
    subject.update(player_ids: [other.id])
    # uuid values are quoted as string literals in the generated SQL
    expect(subject.players.to_sql).to be_eql(<<-SQL.squish)
SELECT "players".* FROM "players" WHERE "players"."id" = '#{other.id}'
SQL
    expect(subject.players.load).to be_a(ActiveRecord::Associations::CollectionProxy)
    expect(subject.players.to_a).to be_eql([other])
  end
  it 'loads several associated records' do
    entries = [other, player.create]
    subject.update(player_ids: entries.map(&:id))
    expect(subject.players.to_sql).to be_eql(<<-SQL.squish)
SELECT "players".* FROM "players"
WHERE "players"."id" IN ('#{entries[0].id}', '#{entries[1].id}')
SQL
    expect(subject.players.load).to be_a(ActiveRecord::Associations::CollectionProxy)
    expect(subject.players.to_a).to be_eql(entries)
  end
  it 'can preload records' do
    records = 5.times.map { player.create }
    subject.players.concat(records)
    entries = game.all.includes(:players).load
    expect(entries.size).to be_eql(1)
    expect(entries.first.players).to be_loaded
    expect(entries.first.players.size).to be_eql(5)
  end
  it 'can joins records' do
    query = game.all.joins(:players)
    expect(query.to_sql).to match(/INNER JOIN "players"/)
    expect { query.load }.not_to raise_error
  end
end
# before/after add/remove callbacks must fire for both id-level and
# record-level assignment.
context 'using callbacks' do
  let(:tags) { FactoryBot.create_list(:tag, 3) }
  # Auto-vivifying hash collecting the tags each callback saw
  let(:collectors) { Hash.new { |h, k| h[k] = [] } }
  subject { Video.create(title: 'A') }
  after do
    Video.reset_callbacks(:save)
    Video._reflections = {}
  end
  before do
    # Seed the owner with the first two tags before redefining the
    # association with the callback hooks
    subject.update_attribute(:tag_ids, tags.first(2).pluck(:id))
    Video.belongs_to_many(:tags,
      before_add: ->(_, tag) { collectors[:before_add] << tag },
      after_add: ->(_, tag) { collectors[:after_add] << tag },
      before_remove: ->(_, tag) { collectors[:before_remove] << tag },
      after_remove: ->(_, tag) { collectors[:after_remove] << tag },
    )
  end
  it 'works with id changes' do
    # Swap tags.first for tags.last: one add and one remove expected
    subject.tag_ids = tags.drop(1).pluck(:id)
    subject.save!
    expect(collectors[:before_add]).to be_eql([tags.last])
    expect(collectors[:after_add]).to be_eql([tags.last])
    expect(collectors[:before_remove]).to be_eql([tags.first])
    expect(collectors[:after_remove]).to be_eql([tags.first])
  end
  it 'works with record changes' do
    subject.tags = tags.drop(1)
    expect(collectors[:before_add]).to be_eql([tags.last])
    expect(collectors[:after_add]).to be_eql([tags.last])
    expect(collectors[:before_remove]).to be_eql([tags.first])
    expect(collectors[:after_remove]).to be_eql([tags.first])
  end
end
# Association keyed by non-id columns (friendly_id / friendly_tag_ids),
# added to the schema on the fly for this group only.
context 'using custom keys' do
  let(:connection) { ActiveRecord::Base.connection }
  let(:post) { Post }
  let(:tag) { Tag }
  let(:tags) { %w[a b c].map { |id| create(:tag, friendly_id: id) } }
  subject { create(:post) }
  before do
    connection.add_column(:tags, :friendly_id, :string)
    connection.add_column(:posts, :friendly_tag_ids, :string, array: true)
    post.belongs_to_many(:tags, foreign_key: :friendly_tag_ids, primary_key: :friendly_id)
    # Column caches must be refreshed after altering the tables
    post.reset_column_information
    tag.reset_column_information
  end
  after do
    tag.reset_column_information
    post.reset_column_information
    post._reflections.delete(:tags)
  end
  it 'loads associated records' do
    subject.update(friendly_tag_ids: tags.pluck(:friendly_id))
    expect(subject.tags.to_sql).to be_eql(<<-SQL.squish)
SELECT "tags".* FROM "tags" WHERE "tags"."friendly_id" IN ('a', 'b', 'c')
SQL
    expect(subject.tags.load).to be_a(ActiveRecord::Associations::CollectionProxy)
    expect(subject.tags.to_a).to be_eql(tags)
  end
  it 'can properly assign tags' do
    expect(subject.friendly_tag_ids).to be_blank
    subject.tags = tags
    expect(subject.friendly_tag_ids).to be_eql(%w[a b c])
  end
end
end
================================================
FILE: spec/tests/collector_spec.rb
================================================
require 'spec_helper'
# Fixed typo in an example description: "instace values starts" ->
# "instance values start". No behavioral change.
RSpec.describe 'Data collector', type: :helper do
  let(:methods_list) { [:foo, :bar] }
  # Collector.new returns an anonymous Class whose instances expose one
  # combined getter/setter per requested method name.
  subject { Torque::PostgreSQL::Collector.new(*methods_list) }
  it 'is a class creator' do
    expect(subject).to be_a(Class)
  end
  it 'has the requested methods' do
    instance = subject.new
    methods_list.each do |name|
      # Both the DSL-style reader/writer and the plain attr writer exist
      expect(instance).to respond_to(name)
      expect(instance).to respond_to("#{name}=")
    end
  end
  it 'instance values start as nil' do
    instance = subject.new
    methods_list.each do |name|
      expect(instance.send(name)).to be_nil
    end
  end
  it 'set values on the same method' do
    instance = subject.new
    methods_list.each do |name|
      # Calling with an argument acts as a setter and returns the value
      expect(instance.send(name, name)).to eql(name)
    end
  end
  it 'get value on the same method' do
    instance = subject.new
    methods_list.each do |name|
      instance.send(name, name)
      expect(instance.send(name)).to eql(name)
    end
  end
  it 'accepts any kind of value' do
    instance = subject.new
    instance.foo 123
    expect(instance.foo).to eql(123)
    instance.foo 'chars'
    expect(instance.foo).to eql('chars')
    # Multiple arguments are collected into an array
    instance.foo :test, :test
    expect(instance.foo).to eql([:test, :test])
    # Keyword-style arguments are captured as a hash
    instance.foo test: :test
    expect(instance.foo).to eql({test: :test})
    # nil clears the stored value
    instance.foo nil
    expect(instance.foo).to be_nil
  end
end
================================================
FILE: spec/tests/distinct_on_spec.rb
================================================
require 'spec_helper'
# DISTINCT ON support, asserted against the exact generated SQL.
RSpec.describe 'DistinctOn' do
  context 'on relation' do
    subject { Post.unscoped }
    it 'has its method' do
      expect(subject).to respond_to(:distinct_on)
    end
    # Plain distinct must be untouched by the extension
    it 'does not mess with original distinct form without select' do
      expect(subject.distinct.to_sql).to \
        eql('SELECT DISTINCT "posts".* FROM "posts"')
    end
    it 'does not mess with original distinct form with select' do
      expect(subject.select(:name).distinct.to_sql).to \
        eql('SELECT DISTINCT "name" FROM "posts"')
    end
    it 'is able to do the basic form' do
      expect(subject.distinct_on(:title).to_sql).to \
        eql('SELECT DISTINCT ON ( "posts"."title" ) "posts".* FROM "posts"')
    end
    it 'is able to do with multiple attributes' do
      expect(subject.distinct_on(:title, :content).to_sql).to \
        eql('SELECT DISTINCT ON ( "posts"."title", "posts"."content" ) "posts".* FROM "posts"')
    end
    # Hash arguments resolve columns through the named association
    it 'is able to do with relation' do
      expect(subject.distinct_on(author: :name).to_sql).to \
        eql('SELECT DISTINCT ON ( "authors"."name" ) "posts".* FROM "posts"')
    end
    it 'is able to do with relation and multiple attributes' do
      expect(subject.distinct_on(author: [:name, :age]).to_sql).to \
        eql('SELECT DISTINCT ON ( "authors"."name", "authors"."age" ) "posts".* FROM "posts"')
    end
    it 'raises with invalid relation' do
      expect { subject.distinct_on(supervisors: :name).to_sql }.to \
        raise_error(ArgumentError, /Relation for/)
    end
    # Only one level of association nesting is supported
    it 'raises with third level hash' do
      expect { subject.distinct_on(author: [comments: :body]).to_sql }.to \
        raise_error(ArgumentError, /on third level/)
    end
  end
  context 'on model' do
    subject { Post }
    it 'has its method' do
      expect(subject).to respond_to(:distinct_on)
    end
    it 'returns a relation when using the method' do
      expect(subject.distinct_on(:title)).to be_a(ActiveRecord::Relation)
    end
  end
end
================================================
FILE: spec/tests/enum_set_spec.rb
================================================
require 'spec_helper'
RSpec.describe 'Enum' do
# Shared setup for the set-enum specs: helper to attach the enum_set
# attribute to a model, plus global wiring of the attribute extension.
let(:connection) { ActiveRecord::Base.connection }
let(:attribute_klass) { Torque::PostgreSQL::Attributes::EnumSet }
let(:table_definition) { ActiveRecord::ConnectionAdapters::PostgreSQL::TableDefinition }
# Enables the set_method macro on +model+ and declares it for +field+
def decorate(model, field, options = {})
  attribute_klass.include_on(model, :enum_set)
  model.enum_set(field, **options)
end
before :each do
  Torque::PostgreSQL.config.enum.set_method = :pg_set_enum
  Torque::PostgreSQL::Attributes::EnumSet.include_on(ActiveRecord::Base)
  # Define a method to find yet to define constants
  Torque::PostgreSQL.config.enum.namespace.define_singleton_method(:const_missing) do |name|
    Torque::PostgreSQL::Attributes::EnumSet.lookup(name)
  end
  # Define a helper method to get a sample value
  Torque::PostgreSQL.config.enum.namespace.define_singleton_method(:sample) do |name|
    Torque::PostgreSQL::Attributes::EnumSet.lookup(name).sample
  end
end
# enum columns can be declared as arrays inside a table definition.
context 'on table definition' do
  subject { table_definition.new(connection, 'articles') }
  it 'can be defined as an array' do
    subject.enum(:content_status, array: true, enum_type: :content_status)
    expect(subject['content_status'].name).to be_eql('content_status')
    expect(subject['content_status'].type).to be_eql(:enum)
    expect(subject['content_status'].options[:enum_type]).to be_eql(:content_status)
    # Rails versions differ on where the array flag lives, so probe both
    array = subject['content_status'].respond_to?(:options) \
      ? subject['content_status'].options[:array] \
      : subject['content_status'].array
    expect(array).to be_eql(true)
  end
end
# Schema dumper output for array enum columns.
context 'on schema' do
  let(:source) { ActiveRecord::Base.connection_pool }
  let(:dump_result) do
    ActiveRecord::SchemaDumper.dump(source, (dump_result = StringIO.new))
    dump_result.string
  end
  it 'can be used on tables' do
    checker = /t\.enum +"conflicts", +array: true, +enum_type: "conflicts"/
    expect(dump_result).to match checker
  end
  # Intentionally pending (xit): symbol-array defaults are not dumped yet
  xit 'can have a default value as an array of symbols' do
    checker = /t\.enum +"types", +default: \[:A, :B\], +array: true, +enum_type: "types"/
    expect(dump_result).to match checker
  end
end
context 'on value' do
subject { Enum::TypesSet }
let(:values) { %w(A B C D) }
let(:error) { Torque::PostgreSQL::Attributes::EnumSet::EnumSetError }
# A second set-enum class with a diverging value list, used to prove
# that equal member names from different enums never compare equal
let(:mock_enum) do
  enum_klass = Class.new(subject::EnumSource.superclass)
  enum_klass.instance_variable_set(:@values, values << '15')
  klass = Class.new(subject.superclass)
  klass.const_set('EnumSource', enum_klass)
  klass
end
# Behavior of the generated set-enum value class (bitmask-like semantics).
it 'class exists' do
  namespace = Torque::PostgreSQL.config.enum.namespace
  expect(namespace.const_defined?('TypesSet')).to be_truthy
  expect(subject.const_defined?('EnumSource')).to be_truthy
  expect(subject < Torque::PostgreSQL::Attributes::EnumSet).to be_truthy
end
it 'returns the db type name' do
  # Set enums map to an array of the underlying enum type
  expect(subject.type_name).to be_eql('types[]')
end
it 'values match database values' do
  expect(subject.values).to be_eql(values)
end
it 'values can be reach using fetch, as in hash enums' do
  expect(subject).to respond_to(:fetch)
  value = subject.fetch('A', 'A')
  expect(value).to be_a(subject)
  expect(value).to be_eql(subject.A)
  value = subject.fetch('other', 'other')
  expect(value).to be_nil
end
it 'values can be reach using [], as in hash enums' do
  expect(subject).to respond_to(:[])
  value = subject['A']
  expect(value).to be_a(subject)
  expect(value).to be_eql(subject.A)
  value = subject['other']
  expect(value).to be_nil
end
it 'accepts respond_to against value' do
  expect(subject).to respond_to(:A)
end
it 'allows fast creation of values' do
  value = subject.A
  expect(value).to be_a(subject)
end
it 'keeps blank values as Lazy' do
  expect(subject.new(nil)).to be_nil
  expect(subject.new([])).to be_blank
end
it 'can start from nil value using lazy' do
  lazy = Torque::PostgreSQL::Attributes::Lazy
  value = subject.new(nil)
  expect(value.__class__).to be_eql(lazy)
  expect(value.to_s).to be_eql('')
  expect(value.to_i).to be_nil
  expect(value.A?).to be_falsey
end
# A plain integer is read as a bitmask over the members (A=1, B=2, ...)
it 'accepts values to come from numeric as power' do
  expect(subject.new(0)).to be_blank
  expect(subject.new(1)).to be_eql(subject.A)
  expect(subject.new(3)).to be_eql(subject.A | subject.B)
  expect { subject.new(16) }.to raise_error(error, /out of bounds/)
end
# An array of integers is read as member indexes (0 => A, 1 => B, ...)
it 'accepts values to come from numeric list' do
  expect(subject.new([0])).to be_eql(subject.A)
  expect(subject.new([0, 1])).to be_eql(subject.A | subject.B)
  expect { subject.new([4]) }.to raise_error(error.superclass, /out of bounds/)
end
it 'accepts string initialization' do
  expect(subject.new('A')).to be_eql(subject.A)
  expect { subject.new('E') }.to raise_error(error.superclass, /not valid for/)
end
it 'allows values bitwise operations' do
  expect((subject.A | subject.B).to_i).to be_eql(3)
  # Disjoint intersection yields an empty set, whose to_i is nil
  expect((subject.A & subject.B).to_i).to be_nil
  expect(((subject.A | subject.B) & subject.B).to_i).to be_eql(2)
end
it 'allows values comparison' do
  value = subject.B | subject.C
  expect(value).to be > subject.A
  expect(value).to be < subject.D
  expect(value).to be_eql(6)
  expect(value).to_not be_eql(1)
  # Same member name from a different enum class never matches
  expect(subject.A == mock_enum.A).to be_falsey
end
it 'accepts value checking' do
  value = subject.B | subject.C
  expect(value).to respond_to(:B?)
  expect(value.B?).to be_truthy
  expect(value.C?).to be_truthy
  expect(value.A?).to be_falsey
  expect(value.D?).to be_falsey
end
it 'accepts replace and bang value' do
  value = subject.B | subject.C
  expect(value).to respond_to(:B!)
  # Bang adds the member in place; replace swaps the whole set
  expect(value.A!).to be_eql(7)
  expect(value.replace(:D)).to be_eql(subject.D)
end
it 'accepts values turn into integer by its power' do
  expect(subject.B.to_i).to be_eql(2)
  expect(subject.C.to_i).to be_eql(4)
end
it 'accepts values turn into an array of integer by index' do
  expect((subject.B | subject.C).map(&:to_i)).to be_eql([1, 2])
end
# Fixed typo in the example description: "resting" -> "testing".
# sample draws a random member, so only assert that new receives
# some Numeric rather than pinning a specific value.
it 'can return a sample for testing purposes' do
  expect(subject).to receive(:new).with(Numeric)
  subject.sample
end
end
# Round-trips through the custom OID type: database text form '{A,B}'
# versus the in-memory set-enum value.
context 'on OID' do
  let(:enum) { Enum::TypesSet }
  let(:enum_source) { enum::EnumSource }
  subject { Torque::PostgreSQL::Adapter::OID::EnumSet.new('types', enum_source) }
  context 'on deserialize' do
    it 'returns nil' do
      expect(subject.deserialize(nil)).to be_nil
    end
    it 'returns enum' do
      value = subject.deserialize('{B,C}')
      expect(value).to be_a(enum)
      expect(value).to be_eql(enum.B | enum.C)
    end
  end
  context 'on serialize' do
    it 'returns nil' do
      expect(subject.serialize(nil)).to be_nil
      # 0 means the empty set, also stored as NULL
      expect(subject.serialize(0)).to be_nil
    end
    it 'returns as string' do
      expect(subject.serialize(enum.B | enum.C)).to be_eql('{B,C}')
      # Integers serialize through their bitmask interpretation (3 => A|B)
      expect(subject.serialize(3)).to be_eql('{A,B}')
    end
  end
  context 'on cast' do
    it 'accepts nil' do
      expect(subject.cast(nil)).to be_nil
    end
    it 'accepts invalid values as nil' do
      expect(subject.cast([])).to be_nil
    end
    it 'accepts array of strings' do
      value = subject.cast(['A'])
      expect(value).to be_a(enum)
      expect(value).to be_eql(enum.A)
    end
    it 'accepts array of numbers' do
      # Numbers in arrays are member indexes, so 1 => B
      value = subject.cast([1])
      expect(value).to be_a(enum)
      expect(value).to be_eql(enum.B)
    end
  end
end
# Human-readable text for set values, including list conjunctions.
context 'on I18n' do
  subject { Enum::TypesSet }
  it 'has the text method' do
    expect(subject.new(0)).to respond_to(:text)
  end
  it 'brings the correct values' do
    expect(subject.new(0).text).to be_eql('')
    expect(subject.new(1).text).to be_eql('A')
    expect(subject.new(2).text).to be_eql('B')
    expect(subject.new(3).text).to be_eql('A and B')
    expect(subject.new(7).text).to be_eql('A, B, and C')
  end
end
# Model-level helpers and scopes generated by the enum_set macro.
context 'on model' do
  let(:instance) { Course.new }
  before(:each) { decorate(Course, :types) }
  subject { Course }
  it 'has all enum set methods' do
    expect(subject).to respond_to(:types)
    expect(subject).to respond_to(:types_keys)
    expect(subject).to respond_to(:types_texts)
    expect(subject).to respond_to(:types_options)
    expect(subject).to respond_to(:has_types)
    expect(subject).to respond_to(:has_any_types)
    expect(instance).to respond_to(:types_text)
    # Every member gets a class scope plus ?/! instance helpers
    subject.types.each do |value|
      value = value.underscore
      expect(subject).to respond_to(value)
      expect(instance).to respond_to(value + '?')
      expect(instance).to respond_to(value + '!')
    end
  end
  it 'scope the model correctly' do
    # Member scopes use the array-contains operator @>
    query = subject.a.to_sql
    expect(query).to include(%{WHERE "courses"."types" @> '{A}'::types[]})
  end
  it 'has a match all scope' do
    query = subject.has_types('B', 'A').to_sql
    expect(query).to include(%{WHERE "courses"."types" @> '{B,A}'::types[]})
  end
  it 'has a match any scope' do
    # Match-any uses the array-overlap operator &&
    query = subject.has_any_types('B', 'A').to_sql
    expect(query).to include(%{WHERE "courses"."types" && '{B,A}'::types[]})
  end
  it 'uses bind param instead of raw value' do
    sql, binds = get_query_with_binds { subject.has_any_types('B', 'A').load }
    expect(sql).to include('WHERE "courses"."types" && $1::types[]')
    expect(binds.first.value).to eq(%w[B A])
  end
end
end
================================================
FILE: spec/tests/enum_spec.rb
================================================
require 'spec_helper'
RSpec.describe 'Enum' do
let(:connection) { ActiveRecord::Base.connection }
let(:attribute_klass) { Torque::PostgreSQL::Attributes::Enum }
let(:table_definition) { ActiveRecord::ConnectionAdapters::PostgreSQL::TableDefinition }
# Enables the pg_enum attribute macro on +model+ (through the Enum
# attribute extension) and immediately declares it for +field+.
def decorate(model, field, opts = {})
  attribute_klass.include_on(model, :pg_enum)
  model.pg_enum(field, **opts)
end
# Global wiring: register the enum attribute extension and make the
# configured namespace resolve/sample enum classes lazily.
before :each do
  Torque::PostgreSQL.config.enum.base_method = :pg_enum
  Torque::PostgreSQL::Attributes::Enum.include_on(ActiveRecord::Base)
  # Define a method to find yet to define constants
  Torque::PostgreSQL.config.enum.namespace.define_singleton_method(:const_missing) do |name|
    Torque::PostgreSQL::Attributes::Enum.lookup(name)
  end
  # Define a helper method to get a sample value
  Torque::PostgreSQL.config.enum.namespace.define_singleton_method(:sample) do |name|
    Torque::PostgreSQL::Attributes::Enum.lookup(name).sample
  end
end
# DDL-level enum helpers: create, drop, rename, and value insertion.
context 'on migration' do
  it 'can be deleted' do
    connection.create_enum(:status, %i(foo bar))
    expect(connection.type_exists?(:status)).to be_truthy
    connection.drop_type(:status)
    expect(connection.type_exists?(:status)).to be_falsey
  end
  it 'can be renamed' do
    connection.rename_type(:content_status, :status)
    expect(connection.type_exists?(:content_status)).to be_falsey
    expect(connection.type_exists?(:status)).to be_truthy
  end
  it 'inserts values at the end' do
    connection.create_enum(:status, %i(foo bar))
    connection.add_enum_values(:status, %i(baz qux))
    expect(connection.enum_values(:status)).to be_eql(['foo', 'bar', 'baz', 'qux'])
  end
  it 'inserts values in the beginning' do
    connection.create_enum(:status, %i(foo bar))
    connection.add_enum_values(:status, %i(baz qux), prepend: true)
    expect(connection.enum_values(:status)).to be_eql(['baz', 'qux', 'foo', 'bar'])
  end
  it 'inserts values in the middle' do
    connection.create_enum(:status, %i(foo bar))
    connection.add_enum_values(:status, %i(baz), after: 'foo')
    expect(connection.enum_values(:status)).to be_eql(['foo', 'baz', 'bar'])
    connection.add_enum_values(:status, %i(qux), before: 'bar')
    expect(connection.enum_values(:status)).to be_eql(['foo', 'baz', 'qux', 'bar'])
  end
  it 'inserts values with prefix or suffix' do
    connection.create_enum(:status, %i(foo bar))
    # prefix: true uses the type name; suffix takes an explicit string
    connection.add_enum_values(:status, %i(baz), prefix: true)
    connection.add_enum_values(:status, %i(qux), suffix: 'tst')
    expect(connection.enum_values(:status)).to be_eql(['foo', 'bar', 'status_baz', 'qux_tst'])
  end
end
context 'on value' do
let(:values) { %w(created draft published archived) }
let(:error) { Torque::PostgreSQL::Attributes::Enum::EnumError }
# A second enum class with a diverging value list, used to prove that
# values from different enum classes never compare equal
let(:mock_enum) do
  klass = Class.new(subject.superclass)
  klass.instance_variable_set(:@values, values << '15')
  klass
end
subject { Enum::ContentStatus }
# Behavior of the generated scalar enum value class.
it 'class exists' do
  namespace = Torque::PostgreSQL.config.enum.namespace
  expect(namespace.const_defined?('ContentStatus')).to be_truthy
  expect(subject < Torque::PostgreSQL::Attributes::Enum).to be_truthy
end
it 'lazy loads values' do
  # @values is only fetched from the database on first use
  expect(subject.instance_variable_defined?(:@values)).to be_falsey
end
it 'returns the db type name' do
  expect(subject.type_name).to be_eql('content_status')
end
it 'values match database values' do
  expect(subject.values).to be_eql(values)
end
it 'can return a sample value' do
  expect(Enum).to respond_to(:sample)
  expect(Enum::ContentStatus).to respond_to(:sample)
  expect(Enum::ContentStatus.sample).to satisfy { |v| values.include?(v) }
  expect(Enum.sample(:content_status)).to satisfy { |v| values.include?(v) }
end
it 'values can be iterated by using each direct on class' do
  expect(subject).to respond_to(:each)
  expect(subject.each).to be_a(Enumerator)
  expect(subject.each.entries).to be_eql(values)
end
it 'values can be reach using fetch, as in hash enums' do
  expect(subject).to respond_to(:fetch)
  value = subject.fetch('archived', 'archived')
  expect(value).to be_a(subject)
  expect(value).to be_eql(subject.archived)
  value = subject.fetch('other', 'other')
  expect(value).to be_nil
end
it 'values can be reach using [], as in hash enums' do
  expect(subject).to respond_to(:[])
  value = subject['archived']
  expect(value).to be_a(subject)
  expect(value).to be_eql(subject.archived)
  value = subject['other']
  expect(value).to be_nil
end
it 'accepts respond_to against value' do
  expect(subject).to respond_to(:archived)
end
it 'allows fast creation of values' do
  value = subject.draft
  expect(value).to be_a(subject)
end
it 'keeps blank values as Lazy' do
  # nil, empty array, and empty string all read back as nil
  expect(subject.new(nil)).to be_nil
  expect(subject.new([])).to be_nil
  expect(subject.new('')).to be_nil
end
it 'can start from nil value using lazy' do
lazy = Torque::PostgreSQL::Attributes::Lazy
value = subject.new(nil)
expect(value.__class__).to be_eql(lazy)
expect(value.to_s).to be_eql('')
expect(value.to_i).to be_nil
expect(value.draft?).to be_falsey
end
it 'accepts values to come from numeric' do
expect(subject.new(0)).to be_eql(subject.created)
expect { subject.new(5) }.to raise_error(error, /out of bounds/)
end
it 'accepts string initialization' do
expect(subject.new('created')).to be_eql(subject.created)
expect { subject.new('updated') }.to raise_error(error, /not valid for/)
end
it 'allows values comparison' do
value = subject.draft
expect(value).to be > subject.created
expect(value).to be < subject.archived
expect(value).to be_eql(subject.draft)
expect(value).to_not be_eql(subject.published)
end
it 'allows values comparison with string' do
value = subject.draft
expect(value).to be > :created
expect(value).to be < :archived
expect(value).to be_eql(:draft)
expect(value).to_not be_eql(:published)
end
it 'allows values comparison with symbol' do
value = subject.draft
expect(value).to be > 'created'
expect(value).to be < 'archived'
expect(value).to be_eql('draft')
expect(value).to_not be_eql('published')
end
it 'allows values comparison with number' do
value = subject.draft
expect(value).to be > 0
expect(value).to be < 3
expect(value).to be_eql(1)
expect(value).to_not be_eql(2.5)
end
it 'does not allow cross-enum comparison' do
expect { subject.draft < mock_enum.published }.to raise_error(error, /^Comparison/)
expect { subject.draft > mock_enum.created }.to raise_error(error, /^Comparison/)
end
it 'does not allow other types comparison' do
expect { subject.draft > true }.to raise_error(error, /^Comparison/)
expect { subject.draft < [] }.to raise_error(error, /^Comparison/)
end
it 'accepts value checking' do
value = subject.draft
expect(value).to respond_to(:archived?)
expect(value.draft?).to be_truthy
expect(value.published?).to be_falsey
end
it 'accepts replace and bang value' do
value = subject.draft
expect(value).to respond_to(:archived!)
expect(value.archived!).to be_eql(subject.archived)
expect(value.replace('created')).to be_eql(subject.created)
end
it 'accepts values turn into integer by its index' do
mock_value = mock_enum.new('15')
expect(subject.created.to_i).to be_eql(0)
expect(subject.archived.to_i).to be_eql(3)
expect(mock_value.to_i).to_not be_eql(15)
expect(mock_value.to_i).to be_eql(4)
end
context 'on members' do
it 'has enumerable operations' do
expect(subject).to respond_to(:all?)
expect(subject).to respond_to(:any?)
expect(subject).to respond_to(:collect)
expect(subject).to respond_to(:count)
expect(subject).to respond_to(:cycle)
expect(subject).to respond_to(:detect)
expect(subject).to respond_to(:drop)
expect(subject).to respond_to(:drop_while)
expect(subject).to respond_to(:each)
expect(subject).to respond_to(:each_with_index)
expect(subject).to respond_to(:entries)
expect(subject).to respond_to(:find)
expect(subject).to respond_to(:find_all)
expect(subject).to respond_to(:find_index)
expect(subject).to respond_to(:first)
expect(subject).to respond_to(:flat_map)
expect(subject).to respond_to(:include?)
expect(subject).to respond_to(:inject)
expect(subject).to respond_to(:lazy)
expect(subject).to respond_to(:map)
expect(subject).to respond_to(:member?)
expect(subject).to respond_to(:one?)
expect(subject).to respond_to(:reduce)
expect(subject).to respond_to(:reject)
expect(subject).to respond_to(:reverse_each)
expect(subject).to respond_to(:select)
expect(subject).to respond_to(:sort)
expect(subject).to respond_to(:zip)
end
it 'works with map' do
result = subject.map(&:to_i)
expect(result).to be_eql([0, 1, 2, 3])
end
end
end
# The ActiveRecord type object (Adapter::OID::Enum) translating between
# database values and Enum instances via cast/serialize/deserialize.
context 'on OID' do
let(:enum) { Enum::ContentStatus }
subject { Torque::PostgreSQL::Adapter::OID::Enum.new('content_status') }
context 'on deserialize' do
it 'returns nil' do
expect(subject.deserialize(nil)).to be_nil
end
it 'returns enum' do
value = subject.deserialize('created')
expect(value).to be_a(enum)
expect(value).to be_eql(enum.created)
end
end
context 'on serialize' do
# Unknown values serialize to nil instead of raising
it 'returns nil' do
expect(subject.serialize(nil)).to be_nil
expect(subject.serialize('test')).to be_nil
expect(subject.serialize(15)).to be_nil
end
it 'returns as string' do
expect(subject.serialize(enum.created)).to be_eql('created')
expect(subject.serialize(1)).to be_eql('draft')
end
end
context 'on cast' do
it 'accepts nil' do
expect(subject.cast(nil)).to be_nil
end
it 'accepts invalid values as nil' do
expect(subject.cast(false)).to be_nil
expect(subject.cast(true)).to be_nil
expect(subject.cast([])).to be_nil
end
it 'accepts string' do
value = subject.cast('created')
expect(value).to be_a(enum)
expect(value).to be_eql(enum.created)
end
# Numeric input is treated as an index into the values list
it 'accepts numeric' do
value = subject.cast(1)
expect(value).to be_a(enum)
expect(value).to be_eql(enum.draft)
end
end
end
# Localized display texts for enum values, resolved through I18n lookups
# (the expected strings come from the spec suite's locale fixtures).
context 'on I18n' do
subject { Enum::ContentStatus }
it 'has the text method' do
expect(subject.new(0)).to respond_to(:text)
end
it 'brings the correct values' do
expect(subject.new(0).text).to be_eql('1 - Created')
expect(subject.new(1).text).to be_eql('Draft (2)')
expect(subject.new(2).text).to be_eql('Finally published')
expect(subject.new(3).text).to be_eql('Archived')
end
end
# Model-level integration: pg_enum-generated helpers, scopes, predicate/bang
# methods, i18n texts, inheritance, and manual (non-autoloaded) decoration.
context 'on model' do
  let(:instance) { FactoryBot.build(:user) }
  before(:each) { decorate(User, :role) }
  subject { User }

  it 'has all enum methods' do
    expect(subject).to respond_to(:roles)
    expect(subject).to respond_to(:roles_keys)
    expect(subject).to respond_to(:roles_texts)
    expect(subject).to respond_to(:roles_options)
    expect(instance).to respond_to(:role_text)
    subject.roles.each do |value|
      expect(subject).to respond_to(value)
      expect(instance).to respond_to(value + '?')
      expect(instance).to respond_to(value + '!')
    end
  end

  it 'plural method brings the list of values' do
    result = subject.roles
    expect(result).to be_a(Array)
    expect(result).to be_eql(Enum::Roles.values)
  end

  # Texts resolved per model/attribute come from the spec locale fixtures.
  it 'text value now uses model and attribute references' do
    instance.role = :visitor
    expect(instance.role_text).to be_eql('A simple Visitor')
    instance.role = :assistant
    expect(instance.role_text).to be_eql('An Assistant')
    instance.role = :manager
    expect(instance.role_text).to be_eql('The Manager')
    instance.role = :admin
    expect(instance.role_text).to be_eql('Super Duper Admin')
  end

  it 'has scopes correctly applied' do
    subject.roles.each do |value|
      expect(subject.send(value).to_sql).to match(/WHERE "users"."role" = '#{value}'/)
    end
  end

  it 'has scopes available on associations' do
    author = FactoryBot.create(:author)
    FactoryBot.create(:post, author: author)
    decorate(Post, :status)
    expect(author.posts).to respond_to(:test_scope)
    Enum::ContentStatus.each do |value|
      expect(author.posts).to be_a(ActiveRecord::Associations::CollectionProxy)
      expect(author.posts).to respond_to(value.to_sym)
      expect(author.posts.send(value).to_sql).to match(/AND "posts"."status" = '#{value}'/)
    end
  end

  it 'ask methods work' do
    instance.role = :assistant
    expect(instance.manager?).to be_falsey
    expect(instance.assistant?).to be_truthy
  end

  it 'bang methods work' do
    instance.admin!
    expect(instance.persisted?).to be_truthy
    updated_at = instance.updated_at
    # Temporarily disable save_on_bang; restore it even when the bang call
    # raises, so the global config does not leak into other examples.
    Torque::PostgreSQL.config.enum.save_on_bang = false
    begin
      instance.visitor!
    ensure
      Torque::PostgreSQL.config.enum.save_on_bang = true
    end
    # With save_on_bang off, the change stays in memory only
    expect(instance.role).to be_eql(:visitor)
    expect(instance.updated_at).to be_eql(updated_at)
    instance.reload
    expect(instance.role).to be_eql(:admin)
  end

  it 'raises when starting an enum with conflicting methods' do
    Torque::PostgreSQL.config.enum.raise_conflicting = true
    AText = Class.new(ActiveRecord::Base)
    AText.table_name = 'texts'
    expect { decorate(AText, :conflict) }.to raise_error(ArgumentError, /already exists in/)
  ensure
    # Restore the flag even when the expectation fails, so other examples
    # keep running with the default (non-raising) behavior.
    Torque::PostgreSQL.config.enum.raise_conflicting = false
  end

  it 'scope the model correctly' do
    query = subject.manager.to_sql
    expect(query).to match(/"users"."role" = 'manager'/)
  end

  context 'on inherited classes' do
    it 'has all enum methods' do
      klass = Class.new(User)
      instance = klass.new
      expect(klass).to respond_to(:roles)
      expect(klass).to respond_to(:roles_keys)
      expect(klass).to respond_to(:roles_texts)
      expect(klass).to respond_to(:roles_options)
      expect(instance).to respond_to(:role_text)
      klass.roles.each do |value|
        expect(klass).to respond_to(value)
        expect(instance).to respond_to(value + '?')
        expect(instance).to respond_to(value + '!')
      end
    end
  end

  context 'without autoload' do
    subject { Author }
    let(:instance) { FactoryBot.build(:author) }

    it 'has both rails original enum and the new pg_enum' do
      expect(subject).to respond_to(:enum)
      expect(subject).to respond_to(:pg_enum)
      expect(subject.method(:pg_enum).arity).to eql(-1)
    end

    # Without decoration, none of the generated methods should appear
    it 'does not create all methods' do
      AAuthor = Class.new(ActiveRecord::Base)
      AAuthor.table_name = 'authors'
      expect(AAuthor).to_not respond_to(:specialties)
      expect(AAuthor).to_not respond_to(:specialties_keys)
      expect(AAuthor).to_not respond_to(:specialties_texts)
      expect(AAuthor).to_not respond_to(:specialties_options)
      expect(AAuthor.instance_methods).to_not include(:specialty_text)
      Enum::Specialties.values.each do |value|
        expect(AAuthor).to_not respond_to(value)
        expect(AAuthor.instance_methods).to_not include(value + '?')
        expect(AAuthor.instance_methods).to_not include(value + '!')
      end
    end

    it 'can be manually initiated' do
      decorate(Author, :specialty)
      expect(subject).to respond_to(:specialties)
      expect(subject).to respond_to(:specialties_keys)
      expect(subject).to respond_to(:specialties_texts)
      expect(subject).to respond_to(:specialties_options)
      expect(instance).to respond_to(:specialty_text)
      Enum::Specialties.values.each do |value|
        expect(subject).to respond_to(value)
        expect(instance).to respond_to(value + '?')
        expect(instance).to respond_to(value + '!')
      end
    end
  end

  context 'with prefix' do
    before(:each) { decorate(Author, :specialty, prefix: 'in') }
    subject { Author }
    let(:instance) { FactoryBot.build(:author) }

    it 'creates all methods correctly' do
      expect(subject).to respond_to(:specialties)
      expect(subject).to respond_to(:specialties_keys)
      expect(subject).to respond_to(:specialties_texts)
      expect(subject).to respond_to(:specialties_options)
      expect(instance).to respond_to(:specialty_text)
      subject.specialties.each do |value|
        expect(subject).to respond_to('in_' + value)
        expect(instance).to respond_to('in_' + value + '?')
        expect(instance).to respond_to('in_' + value + '!')
      end
    end
  end

  context 'with suffix, only, and except' do
    before(:each) do
      decorate(Author, :specialty, suffix: 'expert', only: %w(books movies), except: 'books')
    end

    subject { Author }
    let(:instance) { FactoryBot.build(:author) }

    # :only whitelists, then :except removes from that list
    it 'creates only the requested methods' do
      expect(subject).to respond_to('movies_expert')
      expect(instance).to respond_to('movies_expert?')
      expect(instance).to respond_to('movies_expert!')
      expect(subject).to_not respond_to('books_expert')
      expect(instance).to_not respond_to('books_expert?')
      expect(instance).to_not respond_to('books_expert!')
      expect(subject).to_not respond_to('plays_expert')
      expect(instance).to_not respond_to('plays_expert?')
      expect(instance).to_not respond_to('plays_expert!')
    end
  end
end
end
================================================
FILE: spec/tests/full_text_seach_test.rb
================================================
require 'spec_helper'
RSpec.describe 'FullTextSearch' do
# Pure builder helpers that translate column/weight options into tsvector
# SQL expressions, without touching the database.
context 'on builder' do
let(:builder) { Torque::PostgreSQL::Attributes::Builder }
describe '.to_search_weights' do
it 'works with a single column' do
expect(builder.to_search_weights('title')).to eq({ 'title' => 'A' })
expect(builder.to_search_weights(:title)).to eq({ 'title' => 'A' })
end
# Array entries receive weights A, B, C... in order
it 'works with an array of columns' do
value = { 'title' => 'A', 'content' => 'B' }
expect(builder.to_search_weights(%w[title content])).to eq(value)
expect(builder.to_search_weights(%i[title content])).to eq(value)
end
it 'works with a hash of columns and weights' do
value = { 'title' => 'A', 'content' => 'B', 'summary' => 'C' }
expect(builder.to_search_weights(value.transform_keys(&:to_sym))).to eq(value)
end
# Weights are passed through as-is, even outside the A-D range
it 'works with a hash of columns and invalid weights' do
value = { 'title' => 'X', 'content' => 'Y', 'summary' => 'Z' }
expect(builder.to_search_weights(value.transform_keys(&:to_sym))).to eq(value)
end
end
describe '.to_search_vector_operation' do
it 'builds a simple one' do
result = builder.to_search_vector_operation('english', { 'title' => 'A' })
expect(result.to_sql).to eq("TO_TSVECTOR('english', COALESCE(title, ''))")
end
it 'builds with 2 columns' do
columns = { 'title' => 'A', 'content' => 'B' }
result = builder.to_search_vector_operation('english', columns)
expect(result.to_sql).to eq(<<~SQL.squish)
SETWEIGHT(TO_TSVECTOR('english', COALESCE(title, '')), 'A') ||
SETWEIGHT(TO_TSVECTOR('english', COALESCE(content, '')), 'B')
SQL
end
# A symbol language is treated as a column reference, not a literal
it 'builds with a dynamic language' do
columns = { 'title' => 'A', 'content' => 'B' }
result = builder.to_search_vector_operation(:lang, columns)
expect(result.to_sql).to eq(<<~SQL.squish)
SETWEIGHT(TO_TSVECTOR(lang, COALESCE(title, '')), 'A') ||
SETWEIGHT(TO_TSVECTOR(lang, COALESCE(content, '')), 'B')
SQL
end
end
describe '.search_vector_options' do
it 'correctly translates the settings' do
options = builder.search_vector_options(columns: 'title')
expect(options).to eq(
type: :tsvector,
as: "TO_TSVECTOR('english', COALESCE(title, ''))",
stored: true,
)
end
it 'properly adds the index type' do
options = builder.search_vector_options(columns: 'title', index: true)
expect(options).to eq(
type: :tsvector,
as: "TO_TSVECTOR('english', COALESCE(title, ''))",
stored: true,
index: { using: :gin },
)
end
end
end
# Round-tripping full text search columns through the schema dumper:
# search_language and search_vector definitions must be reconstructed.
context 'on schema dumper' do
let(:connection) { ActiveRecord::Base.connection }
let(:source) { ActiveRecord::Base.connection_pool }
let(:dump_result) do
ActiveRecord::SchemaDumper.dump(source, (dump_result = StringIO.new))
dump_result.string
end
it 'properly supports search language' do
parts = %{t.search_language "lang", default: "english", null: false}
expect(dump_result).to include(parts)
end
it 'properly translates a simple single search vector with embedded language' do
parts = 't.search_vector "search_vector", stored: true'
parts << ', language: :lang, columns: :title'
expect(dump_result).to include(parts)
end
it 'properly translates a simple multiple column search vector with language' do
parts = 't.search_vector "search_vector", stored: true'
parts << ', language: "english", columns: [:title, :content]'
expect(dump_result).to include(parts)
end
it 'supports a custom definition of weights' do
connection.create_table :custom_search do |t|
t.string :title
t.string :content
t.string :subtitle
t.search_vector :sample_a, columns: {
title: 'A',
subtitle: 'A',
content: 'B',
}
t.search_vector :sample_b, columns: {
title: 'A',
subtitle: 'C',
content: 'D',
}
t.search_vector :sample_c, columns: {
title: 'C',
subtitle: 'B',
content: 'A',
}
end
parts = 't.search_vector "sample_a", stored: true'
parts << ', language: "english", columns: { title: "A", subtitle: "A", content: "B" }'
expect(dump_result).to include(parts)
parts = 't.search_vector "sample_b", stored: true'
parts << ', language: "english", columns: { title: "A", subtitle: "C", content: "D" }'
expect(dump_result).to include(parts)
# When weights simply follow A, B, C order, the dumper collapses the hash
# back into a plain array of columns (sorted by weight)
parts = 't.search_vector "sample_c", stored: true'
parts << ', language: "english", columns: [:content, :subtitle, :title]'
expect(dump_result).to include(parts)
end
end
# Declaring the search scope via torque_search_for, with optional prefix
# and suffix naming.
context 'on config' do
let(:base) { Course }
let(:scope) { 'full_text_search' }
let(:mod) { base.singleton_class.included_modules.first }
# The after hook undefines whatever scope name the example ended up with;
# examples mutate the memoized `scope` string in place (String#replace)
# to opt out or to point at a differently-named scope.
after { mod.send(:undef_method, scope) if scope.present? }
it 'has the initialization method' do
scope.replace('')
expect(base).to respond_to(:torque_search_for)
end
it 'properly generates the search scope' do
base.torque_search_for(:search_vector)
expect(base.all).to respond_to(:full_text_search)
end
it 'works with prefix and suffix' do
scope.replace('custom_full_text_search_scope')
base.torque_search_for(:search_vector, prefix: 'custom', suffix: 'scope')
expect(base.all).to respond_to(:custom_full_text_search_scope)
end
end
# The generated full_text_search scope: generated SQL for each query mode,
# ranking, ordering, language resolution, and bind parameters.
context 'on relation' do
let(:base) { Course }
let(:scope) { 'full_text_search' }
let(:mod) { base.singleton_class.included_modules.first }
before { Course.torque_search_for(:search_vector) }
after { mod.send(:undef_method, :full_text_search) }
it 'performs a simple query' do
result = Course.full_text_search('test')
parts = 'SELECT "courses".* FROM "courses"'
parts << ' WHERE "courses"."search_vector" @@'
parts << " PHRASETO_TSQUERY('english', 'test')"
expect(result.to_sql).to eql(parts)
end
it 'can include the order' do
result = Course.full_text_search('test', order: true)
parts = 'SELECT "courses".* FROM "courses"'
parts << ' WHERE "courses"."search_vector" @@'
parts << " PHRASETO_TSQUERY('english', 'test')"
parts << ' ORDER BY TS_RANK("courses"."search_vector",'
parts << " PHRASETO_TSQUERY('english', 'test')) ASC"
expect(result.to_sql).to eql(parts)
end
it 'can include the order descending' do
result = Course.full_text_search('test', order: :desc)
parts = 'SELECT "courses".* FROM "courses"'
parts << ' WHERE "courses"."search_vector" @@'
parts << " PHRASETO_TSQUERY('english', 'test')"
parts << ' ORDER BY TS_RANK("courses"."search_vector",'
parts << " PHRASETO_TSQUERY('english', 'test')) DESC"
expect(result.to_sql).to eql(parts)
end
# rank: true exposes TS_RANK as a selected column named "rank"
it 'can include the rank' do
result = Course.full_text_search('test', rank: true)
parts = 'SELECT "courses".*, TS_RANK("courses"."search_vector",'
parts << " PHRASETO_TSQUERY('english', 'test')) AS rank"
parts << ' FROM "courses" WHERE "courses"."search_vector" @@'
parts << " PHRASETO_TSQUERY('english', 'test')"
expect(result.to_sql).to eql(parts)
end
it 'can include the rank named differently' do
result = Course.full_text_search('test', rank: :custom_rank)
parts = 'SELECT "courses".*, TS_RANK("courses"."search_vector",'
parts << " PHRASETO_TSQUERY('english', 'test')) AS custom_rank"
parts << ' FROM "courses" WHERE "courses"."search_vector" @@'
parts << " PHRASETO_TSQUERY('english', 'test')"
expect(result.to_sql).to eql(parts)
end
# Each mode maps to its PostgreSQL query-parsing function
it 'can use default query mode' do
result = Course.full_text_search('test', mode: :default)
parts = 'SELECT "courses".* FROM "courses"'
parts << ' WHERE "courses"."search_vector" @@'
parts << " TO_TSQUERY('english', 'test')"
expect(result.to_sql).to eql(parts)
end
it 'can use plain query mode' do
result = Course.full_text_search('test', mode: :plain)
parts = 'SELECT "courses".* FROM "courses"'
parts << ' WHERE "courses"."search_vector" @@'
parts << " PLAINTO_TSQUERY('english', 'test')"
expect(result.to_sql).to eql(parts)
end
it 'can use web query mode' do
result = Course.full_text_search('test', mode: :web)
parts = 'SELECT "courses".* FROM "courses"'
parts << ' WHERE "courses"."search_vector" @@'
parts << " WEBSEARCH_TO_TSQUERY('english', 'test')"
expect(result.to_sql).to eql(parts)
end
# A symbol matching a column becomes a column reference in the query
it 'can use a attribute as the language' do
result = Course.full_text_search('test', language: :lang)
parts = 'SELECT "courses".* FROM "courses"'
parts << ' WHERE "courses"."search_vector" @@'
parts << %{ PHRASETO_TSQUERY("courses"."lang", 'test')}
expect(result.to_sql).to eql(parts)
end
# A symbol matching a class method is called to resolve the language
it 'can call a method to pull the language' do
Course.define_singleton_method(:search_language) { 'portuguese' }
result = Course.full_text_search('test', language: :search_language)
parts = 'SELECT "courses".* FROM "courses"'
parts << ' WHERE "courses"."search_vector" @@'
parts << " PHRASETO_TSQUERY('portuguese', 'test')"
expect(result.to_sql).to eql(parts)
Course.singleton_class.undef_method(:search_language)
end
it 'properly binds all provided values' do
query = Course.full_text_search('test')
sql, binds = get_query_with_binds { query.load }
expect(sql).to include("PHRASETO_TSQUERY($1, $2)")
expect(binds.first.value).to eq('english')
expect(binds.second.value).to eq('test')
end
it 'raises an error when the language is not found' do
expect do
Course.full_text_search('test', language: '')
end.to raise_error(ArgumentError, /Unable to determine language/)
end
it 'raises an error when the mode is invalid' do
expect do
Course.full_text_search('test', mode: :invalid)
end.to raise_error(ArgumentError, /Invalid mode :invalid for full text search/)
end
end
end
================================================
FILE: spec/tests/function_spec.rb
================================================
require 'spec_helper'
# The FN helper module: shortcuts for building Arel bind params, argument
# concatenation, and arbitrary SQL function nodes.
RSpec.describe 'Function' do
let(:helper) { Torque::PostgreSQL::FN }
let(:conn) { ActiveRecord::Base.connection }
let(:visitor) { ::Arel::Visitors::PostgreSQL.new(conn) }
let(:collector) { ::Arel::Collectors::SQLString }
context 'on helper' do
it 'helps creating a bind' do
type = ::ActiveRecord::Type::String.new
expect(helper.bind(:foo, 'test', type)).to be_a(::Arel::Nodes::BindParam)
end
it 'helps creating a bind for a model attribute' do
expect(helper.bind_for(Video, :title, 'test')).to be_a(::Arel::Nodes::BindParam)
end
it 'helps creating a bind for an arel attribute' do
attr = Video.arel_table['title']
expect(helper.bind_with(attr, 'test')).to be_a(::Arel::Nodes::BindParam)
end
it 'helps concatenating arguments' do
values = %w[a b c].map(&::Arel.method(:sql))
# Unable to just call .sql with a simple thing
visited = visitor.accept(helper.concat(values[0]), collector.new)
expect(visited.value).to eq("a")
# 2+ we can call .sql directly
expect(helper.concat(values[0], values[1]).to_sql).to eq("a || b")
expect(helper.concat(values[0], values[1], values[2]).to_sql).to eq("a || b || c")
end
# Unknown helper methods become SQL function calls by name
it 'helps building any other function' do
values = %w[a b c].map(&::Arel.method(:sql))
expect(helper).to respond_to(:coalesce)
expect(helper.coalesce(values[0], values[1]).to_sql).to eq("COALESCE(a, b)")
end
end
end
================================================
FILE: spec/tests/geometric_builder_spec.rb
================================================
require 'spec_helper'
RSpec.describe 'Geometries' do
# The abstract GeometryBuilder type: how subclasses configure TYPE/PIECES/
# FORMATION constants and how cast/serialize/deserialize parse values.
context 'on build' do
  # A throwaway builder subclass with a 4-piece formation, so the generic
  # parsing logic can be exercised without a real geometry type.
  let(:klass) do
    klass = Class.new(Torque::PostgreSQL::GeometryBuilder)
    klass.define_singleton_method(:name) { 'TestSample' }
    klass.const_set('PIECES', %i[a b c d].freeze)
    klass.const_set('FORMATION', '(%s, %s, <%s, {%s}>)'.freeze)
    klass
  end

  let(:instance) { klass.new }

  context '#type' do
    it 'originally does not have the constant defined' do
      # FIX: Module#constants returns Symbols, so the previous check against
      # the String 'TYPE' could never match and the expectation was vacuous.
      expect(klass.constants).not_to include(:TYPE)
    end

    # First call memoizes the snake-cased name into the TYPE constant
    it 'creates the type constant based on the name' do
      expect(instance.type).to be_eql(:test_sample)
      expect(klass.constants).to include(:TYPE)
      expect(klass::TYPE).to be_eql(:test_sample)
    end

    it 'returns the constant value' do
      klass.const_set('TYPE', 'another_type')
      expect(instance.type).to be_eql('another_type')
    end
  end

  context '#pieces' do
    it 'returns the definition pieces' do
      expect(instance.pieces).to be_eql([:a, :b, :c, :d])
    end

    it 'returns whatever is in the constant' do
      klass.send(:remove_const, 'PIECES')
      klass.const_set('PIECES', %i[a].freeze)
      expect(instance.pieces).to be_eql([:a])
    end
  end

  context '#formation' do
    it 'returns the definition set' do
      expect(instance.formation).to be_eql("(%s, %s, <%s, {%s}>)")
    end

    it 'returns whatever is in the constant' do
      klass.send(:remove_const, 'FORMATION')
      klass.const_set('FORMATION', '(<%s>)'.freeze)
      expect(instance.formation).to be_eql("(<%s>)")
    end
  end

  context '#cast' do
    let(:config_class) { double }
    before { allow(instance).to receive(:config_class).and_return(config_class) }

    # Numbers are extracted from the string and truncated to the first
    # PIECES.size entries before building the config class.
    it 'accepts string values' do
      expect(instance.cast('')).to be_nil
      expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(4)
      expect(instance.cast('1, 2, 3, 4')).to be_eql(4)
      expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(8)
      expect(instance.cast('(1, {2}, <3>, 4)')).to be_eql(8)
      expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(7)
      expect(instance.cast('1, 2, 3, 4, 5, 6')).to be_eql(7)
      expect(config_class).to receive(:new).with(1.0, 2.0, 3.0, 4.0).and_return(1)
      expect(instance.cast('1.0, 2.0, 3.0, 4.0')).to be_eql(1)
      expect { instance.cast(['6 6 6']) }.to raise_error(RuntimeError, 'Invalid format')
    end

    it 'accepts hash values' do
      expect(instance.cast({})).to be_nil
      expect { instance.cast({ 'a' => 1, 'b' => 2 }) }.to raise_error(RuntimeError, 'Invalid format')
      expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(4)
      expect(instance.cast({ 'a' => 1, 'b' => 2 , 'c' => 3, 'd' => 4})).to be_eql(4)
      expect(config_class).to receive(:new).with(1.0, 2.0, 3.0, 4.0).and_return(5)
      expect(instance.cast({ 'a' => 1.0, 'b' => 2.0, 'c' => 3.0, 'd' => 4.0})).to be_eql(5)
      expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(2)
      expect(instance.cast({ a: 1, b: 2 , c: 3, d: 4, e: 5, f: 6})).to be_eql(2)
    end

    it 'accepts array values' do
      expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(4)
      expect(instance.cast([1, 2, 3, 4])).to be_eql(4)
      expect(config_class).to receive(:new).with(1.1, 1.2, 1.3, 1.4).and_return(9)
      expect(instance.cast(['1.1', '1.2', '1.3', '1.4'])).to be_eql(9)
      expect(config_class).to receive(:new).with(6, 5, 4, 3).and_return(2)
      expect(instance.cast([6, 5, 4, 3, 2, 1])).to be_eql(2)
      expect(instance.cast([])).to be_nil
      expect { instance.cast([6, 5, 4]) }.to raise_error(RuntimeError, 'Invalid format')
    end
  end

  context '#serialize' do
    before { allow(instance).to receive(:config_class).and_return(OpenStruct) }

    it 'return value nil' do
      expect(instance.serialize(nil)).to be_nil
    end

    it 'accepts config class' do
      expect(instance.serialize(OpenStruct.new)).to be_nil
      expect(instance.serialize(OpenStruct.new(a: 1, b: 2, c: 3, d: 4))).to be_eql('(1, 2, <3, {4}>)')
      expect(instance.serialize(OpenStruct.new(a: 1, b: 2, c: 3, d: 4, e: 5))).to be_eql('(1, 2, <3, {4}>)')
    end

    it 'accepts hash value' do
      # NOTE(review): this first expectation exercises #cast inside the
      # #serialize context — presumably it was meant to call #serialize;
      # confirm against GeometryBuilder#serialize's hash validation.
      expect { instance.cast({a: 1, b: 2, c: 3}) }.to raise_error(RuntimeError, 'Invalid format')
      expect(instance.serialize({a: 1, b: 2, c: 3, d: 4})).to be_eql('(1, 2, <3, {4}>)')
      expect(instance.serialize({a: 1, b: 2, c: 3, d: 4, e: 5, f: 6})).to be_eql('(1, 2, <3, {4}>)')
    end

    it 'accepts array value' do
      expect { instance.serialize([6, 5, 4]) }.to raise_error(RuntimeError, 'Invalid format')
      expect(instance.serialize([1, 2, 3, 4])).to be_eql('(1, 2, <3, {4}>)')
      expect(instance.serialize([5, 4, 3, 2, 1, 0])).to be_eql('(5, 4, <3, {2}>)')
    end
  end

  context '#deserialize' do
    let(:config_class) { double }
    before { allow(instance).to receive(:config_class).and_return(config_class) }

    it 'return value nil' do
      expect(instance.deserialize(nil)).to be_nil
    end

    it 'accept correct format' do
      expect(config_class).to receive(:new).with(1, 2, 3, 4).and_return(6)
      expect(instance.deserialize('(1, 2, <3, {4}>)')).to be_eql(6)
    end
  end

  context '#type_cast_for_schema' do
    before { allow(instance).to receive(:config_class).and_return(OpenStruct) }

    it 'returns the array for schema' do
      result = instance.type_cast_for_schema(OpenStruct.new(a: 1, b: 2, c: 3, d: 4))
      expect(result).to be_eql([1, 2, 3, 4])
    end
  end
end
# Box value object: #points expands the two stored corners into all four
# corner points, built with the configurable point class.
context 'on box' do
let(:klass) { Torque::PostgreSQL::Adapter::OID::Box }
let(:value_klass) { Torque::PostgreSQL::Box }
let(:instance) { klass.new }
let(:value_instance) { instance.cast([1, 2, 3, 4]) }
before { allow(instance).to receive(:config_class).and_return(value_klass) }
it '#points' do
mock_klass = Struct.new(:a, :b)
Torque::PostgreSQL.config.geometry.point_class = mock_klass
result = value_instance.points
expect(result).to be_a(Array)
expect(result.size).to be_eql(4)
expect(result).to all(be_a(mock_klass))
expect(result[0].a).to be_eql(1.0)
expect(result[0].b).to be_eql(2.0)
expect(result[1].a).to be_eql(1.0)
expect(result[1].b).to be_eql(4.0)
expect(result[2].a).to be_eql(3.0)
expect(result[2].b).to be_eql(2.0)
expect(result[3].a).to be_eql(3.0)
expect(result[3].b).to be_eql(4.0)
end
end
# Circle value object: #center reads/writes the x/y pair through the
# configurable point class.
context 'on circle' do
let(:klass) { Torque::PostgreSQL::Adapter::OID::Circle }
let(:value_klass) { Torque::PostgreSQL::Circle }
let(:instance) { klass.new }
let(:value_instance) { instance.cast([1, 2, 3]) }
before { allow(instance).to receive(:config_class).and_return(value_klass) }
it '#center' do
mock_klass = Struct.new(:a, :b)
Torque::PostgreSQL.config.geometry.point_class = mock_klass
result = value_instance.center
expect(result).to be_a(mock_klass)
expect(result.a).to be_eql(1.0)
expect(result.b).to be_eql(2.0)
end
# Accepts either a raw [x, y] pair or an instance of the point class
it '#center=' do
mock_klass = Struct.new(:x, :y)
Torque::PostgreSQL.config.geometry.point_class = mock_klass
value_instance.center = [1, 2]
expect(value_instance.x).to be_eql(1)
expect(value_instance.y).to be_eql(2)
value_instance.center = mock_klass.new(3, 4)
expect(value_instance.x).to be_eql(3)
expect(value_instance.y).to be_eql(4)
end
end
end
================================================
FILE: spec/tests/has_many_spec.rb
================================================
require 'spec_helper'
RSpec.describe 'HasMany' do
# The HasMany association builder must accept the torque-added :array option.
context 'on builder' do
let(:builder) { ActiveRecord::Associations::Builder::HasMany }
it 'adds the array option' do
expect(builder.send(:valid_options, {})).to include(:array)
end
end
context 'on original' do
# Regression coverage for the stock (non-array) has_many behavior, making
# sure torque's overrides keep the original association working.
let(:other) { Text }
let(:key) { :texts }
before { User.has_many :texts }
subject { User.create(name: 'User 1') }
# Remove the ad-hoc association so it does not leak into other examples
after { User._reflections = {} }
it 'has the method' do
expect(subject).to respond_to(:texts)
expect(subject._reflections).to include(key)
end
it 'has correct foreign key' do
item = subject._reflections[key]
expect(item.foreign_key).to be_eql('user_id')
end
it 'loads associated records' do
expect(subject.texts.to_sql).to match(Regexp.new(<<-SQL.squish))
SELECT "texts"\\.\\* FROM "texts" WHERE \\(?"texts"\\."user_id" = #{subject.id}\\)?
SQL
expect(subject.texts.load).to be_a(ActiveRecord::Associations::CollectionProxy)
expect(subject.texts.to_a).to be_eql([])
end
it 'can be marked as loaded' do
expect(subject.texts.loaded?).to be_eql(false)
expect(subject.texts).to respond_to(:load_target)
expect(subject.texts.load_target).to be_eql([])
expect(subject.texts.loaded?).to be_eql(true)
end
it 'can find specific records' do
records = FactoryBot.create_list(:text, 10, user_id: subject.id)
ids = records.map(&:id).sample(5)
expect(subject.texts).to respond_to(:find)
records = subject.texts.find(*ids)
expect(records.size).to be_eql(5)
expect(records.map(&:id).sort).to be_eql(ids.sort)
end
it 'can return last n records' do
records = FactoryBot.create_list(:text, 10, user_id: subject.id)
ids = records.map(&:id).last(5)
expect(subject.texts).to respond_to(:last)
records = subject.texts.last(5)
expect(records.size).to be_eql(5)
expect(records.map(&:id).sort).to be_eql(ids.sort)
end
it 'can return first n records' do
records = FactoryBot.create_list(:text, 10, user_id: subject.id)
ids = records.map(&:id).first(5)
expect(subject.texts).to respond_to(:take)
records = subject.texts.take(5)
expect(records.size).to be_eql(5)
expect(records.map(&:id).sort).to be_eql(ids.sort)
end
it 'can build an associated record' do
record = subject.texts.build(content: 'Test')
expect(record).to be_a(other)
expect(record).not_to be_persisted
expect(record.content).to be_eql('Test')
expect(record.user_id).to be_eql(subject.id)
expect(subject.save).to be_truthy
expect(subject.texts.size).to be_eql(1)
end
it 'can create an associated record' do
record = subject.texts.create(content: 'Test')
expect(subject.texts).to respond_to(:create!)
expect(record).to be_a(other)
expect(record).to be_persisted
expect(record.content).to be_eql('Test')
expect(record.user_id).to be_eql(subject.id)
end
it 'can concat records' do
FactoryBot.create(:text, user_id: subject.id)
expect(subject.texts.size).to be_eql(1)
subject.texts.concat(other.new(content: 'Test'))
expect(subject.texts.size).to be_eql(2)
expect(subject.texts.last.content).to be_eql('Test')
end
it 'can replace records' do
FactoryBot.create(:text, user_id: subject.id)
expect(subject.texts.size).to be_eql(1)
subject.texts.replace([other.new(content: 'Test 1'), other.new(content: 'Test 2')])
expect(subject.texts.size).to be_eql(2)
expect(subject.texts[0].content).to be_eql('Test 1')
expect(subject.texts[1].content).to be_eql('Test 2')
end
it 'can delete all records' do
FactoryBot.create_list(:text, 5, user_id: subject.id)
expect(subject.texts.size).to be_eql(5)
subject.texts.delete_all
expect(subject.texts.size).to be_eql(0)
end
it 'can destroy all records' do
FactoryBot.create_list(:text, 5, user_id: subject.id)
expect(subject.texts.size).to be_eql(5)
subject.texts.destroy_all
expect(subject.texts.size).to be_eql(0)
end
it 'can have sum operations' do
result = FactoryBot.create_list(:text, 5, user_id: subject.id).map(&:id).reduce(:+)
expect(subject.texts).to respond_to(:sum)
expect(subject.texts.sum(:id)).to be_eql(result)
end
it 'can have a pluck operation' do
result = FactoryBot.create_list(:text, 5, user_id: subject.id).map(&:content).sort
expect(subject.texts).to respond_to(:pluck)
expect(subject.texts.pluck(:content).sort).to be_eql(result)
end
it 'can be markes as empty' do
expect(subject.texts).to respond_to(:empty?)
expect(subject.texts.empty?).to be_truthy
FactoryBot.create(:text, user_id: subject.id)
expect(subject.texts.empty?).to be_falsey
end
it 'can check if a record is included on the list' do
inside = FactoryBot.create(:text, user_id: subject.id)
outside = FactoryBot.create(:text)
expect(subject.texts).to respond_to(:include?)
expect(subject.texts.include?(inside)).to be_truthy
expect(subject.texts.include?(outside)).to be_falsey
end
it 'can append records' do
FactoryBot.create(:text, user_id: subject.id)
expect(subject.texts.size).to be_eql(1)
subject.texts << other.new(content: 'Test')
expect(subject.texts.size).to be_eql(2)
expect(subject.texts.last.content).to be_eql('Test')
end
it 'can clear records' do
FactoryBot.create(:text, user_id: subject.id)
expect(subject.texts.size).to be_eql(1)
subject.texts.clear
expect(subject.texts.size).to be_eql(0)
end
it 'can reload records' do
expect(subject.texts.size).to be_eql(0)
FactoryBot.create(:text, user_id: subject.id)
expect(subject.texts.size).to be_eql(0)
subject.texts.reload
expect(subject.texts.size).to be_eql(1)
end
it 'can preload records' do
FactoryBot.create_list(:text, 5, user_id: subject.id)
entries = User.all.includes(:texts).load
expect(entries.size).to be_eql(1)
expect(entries.first.texts).to be_loaded
expect(entries.first.texts.size).to be_eql(5)
end
it 'can joins records' do
query = User.all.joins(:texts)
expect(query.to_sql).to match(/INNER JOIN "texts"/)
expect { query.load }.not_to raise_error
end
context 'with query constraint' do
let(:activity) { Activity.create! }
before do
skip('Only Rails 7.1 onwards') unless Post.respond_to?(:query_constraints)
Post.query_constraints :author_id, :id
Activity.query_constraints :author_id, :id
Activity.has_many :posts
end
after do
Post.instance_variable_set(:@has_query_constraints, false)
Post.instance_variable_set(:@query_constraints_list, nil)
Post.instance_variable_set(:@_query_constraints_list, nil)
Activity.instance_variable_set(:@has_query_constraints, false)
Activity.instance_variable_set(:@query_constraints_list, nil)
Activity.instance_variable_set(:@_query_constraints_list, nil)
end
it 'properly preload records' do
FactoryBot.create_list(:post, 5, activity: activity)
entries = Activity.all.includes(:posts).load
expect(entries.size).to be_eql(1)
expect(entries.first.posts).to be_loaded
expect(entries.first.posts.size).to be_eql(5)
end
it 'properly preload records using preloader' do
FactoryBot.create_list(:post, 5, activity: activity)
entries = ActiveRecord::Associations::Preloader.new(
records: Activity.all,
associations: [:posts],
).call.first.records_by_owner
expect(entries.size).to be_eql(1)
expect(entries.values.first.size).to be_eql(5)
end
end
end
# Exercises the array-backed has_many (foreign keys stored in an array
# column on the associated table) provided by this gem.
context 'on array' do
  let(:other) { Video }
  let(:key) { :videos }

  before { Tag.has_many :videos, array: true }
  subject { Tag.create(name: 'A') }
  # Remove the reflection added in the before hook so other specs are unaffected.
  after { Tag._reflections = {} }

  it 'has the method' do
    expect(subject).to respond_to(:videos)
    expect(subject._reflections).to include(key)
  end

  it 'has correct foreign key' do
    item = subject._reflections[key]
    expect(item.foreign_key).to be_eql('tag_ids')
  end

  it 'loads associated records' do
    expect(subject.videos.to_sql).to eq(<<~SQL.squish)
      SELECT "videos".* FROM "videos" WHERE #{subject.id} = ANY("videos"."tag_ids")
    SQL

    expect(subject.videos.load).to be_a(ActiveRecord::Associations::CollectionProxy)
    expect(subject.videos.to_a).to be_eql([])
  end

  it 'uses binds instead of the literal value' do
    query = subject.videos
    sql, binds = get_query_with_binds { query.load }

    expect(sql).to include('WHERE $1 = ANY("videos"."tag_ids")')
    expect(binds.first.value).to eq(subject.id)
  end

  it 'can be marked as loaded' do
    expect(subject.videos.loaded?).to be_eql(false)
    expect(subject.videos).to respond_to(:load_target)
    expect(subject.videos.load_target).to be_eql([])
    expect(subject.videos.loaded?).to be_eql(true)
  end

  it 'can find specific records' do
    records = FactoryBot.create_list(:video, 10, tag_ids: [subject.id])
    ids = records.map(&:id).sample(5)

    expect(subject.videos).to respond_to(:find)
    records = subject.videos.find(*ids)

    expect(records.size).to be_eql(5)
    expect(records.map(&:id).sort).to be_eql(ids.sort)
  end

  it 'can return last n records' do
    records = FactoryBot.create_list(:video, 10, tag_ids: [subject.id])
    ids = records.map(&:id).last(5)

    expect(subject.videos).to respond_to(:last)
    records = subject.videos.last(5)

    expect(records.size).to be_eql(5)
    expect(records.map(&:id).sort).to be_eql(ids.sort)
  end

  it 'can return first n records' do
    records = FactoryBot.create_list(:video, 10, tag_ids: [subject.id])
    ids = records.map(&:id).first(5)

    expect(subject.videos).to respond_to(:take)
    records = subject.videos.take(5)

    expect(records.size).to be_eql(5)
    expect(records.map(&:id).sort).to be_eql(ids.sort)
  end

  it 'can build an associated record' do
    record = subject.videos.build(title: 'Test')
    expect(record).to be_a(other)
    expect(record).not_to be_persisted
    expect(record.title).to be_eql('Test')

    expect(subject.save).to be_truthy
    # The owner id only lands in the array once the owner is saved.
    expect(record.tag_ids).to be_eql([subject.id])
    expect(subject.videos.size).to be_eql(1)
  end

  it 'can create an associated record' do
    record = subject.videos.create(title: 'Test')
    expect(subject.videos).to respond_to(:create!)
    expect(record).to be_a(other)
    expect(record).to be_persisted
    expect(record.title).to be_eql('Test')
    expect(record.tag_ids).to be_eql([subject.id])
  end

  it 'can persist after accessed in after_create' do
    other.belongs_to_many(:tags)
    other.after_create { self.tags.to_a }

    video = FactoryBot.create(:video)
    subject.videos << video

    expect(subject.reload.videos.size).to eql(1)
    expect(video.reload.tags.size).to eql(1)

    other.reset_callbacks(:create)
    other._reflections = {}
  end

  it 'can concat records' do
    FactoryBot.create(:video, tag_ids: [subject.id])
    expect(subject.videos.size).to be_eql(1)
    subject.videos.concat(other.new(title: 'Test'))
    expect(subject.videos.size).to be_eql(2)
    expect(subject.videos.last.title).to be_eql('Test')
  end

  it 'can replace records' do
    FactoryBot.create(:video, tag_ids: [subject.id])
    expect(subject.videos.size).to be_eql(1)
    subject.videos.replace([other.new(title: 'Test 1'), other.new(title: 'Test 2')])
    expect(subject.videos.size).to be_eql(2)
    expect(subject.videos[0].title).to be_eql('Test 1')
    expect(subject.videos[1].title).to be_eql('Test 2')
  end

  it 'can delete all records' do
    FactoryBot.create_list(:video, 5, tag_ids: [subject.id])
    expect(subject.videos.size).to be_eql(5)
    subject.videos.delete_all
    expect(subject.videos.size).to be_eql(0)
  end

  it 'can destroy all records' do
    FactoryBot.create_list(:video, 5, tag_ids: [subject.id])
    expect(subject.videos.size).to be_eql(5)
    subject.videos.destroy_all
    expect(subject.videos.size).to be_eql(0)
  end

  it 'can have sum operations' do
    result = FactoryBot.create_list(:video, 5, tag_ids: [subject.id]).map(&:id).reduce(:+)

    expect(subject.videos).to respond_to(:sum)
    expect(subject.videos.sum(:id)).to be_eql(result)
  end

  it 'can have a pluck operation' do
    result = FactoryBot.create_list(:video, 5, tag_ids: [subject.id]).map(&:title).sort

    expect(subject.videos).to respond_to(:pluck)
    expect(subject.videos.pluck(:title).sort).to be_eql(result)
  end

  it 'can be marked as empty' do
    expect(subject.videos).to respond_to(:empty?)
    expect(subject.videos.empty?).to be_truthy

    FactoryBot.create(:video, tag_ids: [subject.id])
    expect(subject.videos.empty?).to be_falsey
  end

  it 'can check if a record is included on the list' do
    inside = FactoryBot.create(:video, tag_ids: [subject.id])
    outside = FactoryBot.create(:video)

    expect(subject.videos).to respond_to(:include?)
    expect(subject.videos.include?(inside)).to be_truthy
    expect(subject.videos.include?(outside)).to be_falsey
  end

  it 'can append records' do
    FactoryBot.create(:video, tag_ids: [subject.id])
    expect(subject.videos.size).to be_eql(1)
    subject.videos << other.new(title: 'Test')
    expect(subject.videos.size).to be_eql(2)
    expect(subject.videos.last.title).to be_eql('Test')
  end

  it 'can clear records' do
    FactoryBot.create(:video, tag_ids: [subject.id])
    expect(subject.videos.size).to be_eql(1)
    subject.videos.clear
    expect(subject.videos.size).to be_eql(0)
  end

  it 'can reload records' do
    expect(subject.videos.size).to be_eql(0)
    FactoryBot.create(:video, tag_ids: [subject.id])

    # The cached association is only refreshed by an explicit reload.
    expect(subject.videos.size).to be_eql(0)
    subject.videos.reload
    expect(subject.videos.size).to be_eql(1)
  end

  it 'can preload records' do
    FactoryBot.create_list(:video, 5, tag_ids: [subject.id])
    entries = Tag.all.includes(:videos).load

    expect(entries.size).to be_eql(1)
    expect(entries.first.videos).to be_loaded
    expect(entries.first.videos.size).to be_eql(5)
  end

  it 'can joins records' do
    query = Tag.all.joins(:videos)
    expect(query.to_sql).to match(/INNER JOIN "videos"/)
    expect { query.load }.not_to raise_error
  end
end
# Verifies the array-backed has_many when both primary and foreign keys are
# UUID columns, using anonymous models backed by tables created on the fly.
context 'using uuid' do
  let(:connection) { ActiveRecord::Base.connection }
  let(:game) { Class.new(ActiveRecord::Base) }
  let(:player) { Class.new(ActiveRecord::Base) }

  # TODO: Set as a shared example
  before do
    connection.create_table(:players, id: :uuid) { |t| t.string :name }
    connection.create_table(:games, id: :uuid) { |t| t.uuid :player_ids, array: true }

    game.table_name = 'games'
    player.table_name = 'players'
    player.has_many :games, array: true, anonymous_class: game,
      inverse_of: false, foreign_key: :player_ids
  end

  subject { player.create }

  it 'loads associated records' do
    # UUID values are quoted in the generated SQL, unlike integer ids.
    expect(subject.games.to_sql).to eq(<<~SQL.squish)
      SELECT "games".* FROM "games"
      WHERE '#{subject.id}' = ANY("games"."player_ids")
    SQL

    expect(subject.games.load).to be_a(ActiveRecord::Associations::CollectionProxy)
    expect(subject.games.to_a).to be_eql([])
  end

  it 'uses binds instead of the literal value' do
    query = subject.games
    sql, binds = get_query_with_binds { query.load }

    expect(sql).to include('WHERE $1 = ANY("games"."player_ids")')
    expect(binds.first.value).to eq(subject.id)
  end

  it 'can preload records' do
    5.times { game.create(player_ids: [subject.id]) }
    entries = player.all.includes(:games).load

    expect(entries.size).to be_eql(1)
    expect(entries.first.games).to be_loaded
    expect(entries.first.games.size).to be_eql(5)
  end

  it 'can joins records' do
    query = player.all.joins(:games)
    expect(query.to_sql).to match(/INNER JOIN "games"/)
    expect { query.load }.not_to raise_error
  end
end
end
================================================
FILE: spec/tests/insert_all_spec.rb
================================================
require 'spec_helper'
# Asserts the SQL generated by insert_all/upsert_all, including the custom
# where-clause support added by this gem.
RSpec.describe 'InsertAll' do
  context 'on executing' do
    before do
      # Expose the SQL builder and stub execution so the generated statement
      # can be asserted without hitting the database.
      ActiveRecord::InsertAll.send(:public, :to_sql)
      allow_any_instance_of(ActiveRecord::InsertAll).to receive(:execute, &:to_sql)
    end

    subject { Tag }
    let(:entries) { [{ name: 'A' }, { name: 'B' }] }

    it 'does not mess with insert_all' do
      result = subject.insert_all(entries)
      expect(result.squish).to be_eql(<<~SQL.squish)
        INSERT INTO "tags" ("name") VALUES ('A'), ('B')
        ON CONFLICT DO NOTHING RETURNING "id"
      SQL

      result = subject.insert_all(entries, returning: %i[name])
      expect(result.squish).to be_eql(<<~SQL.squish)
        INSERT INTO "tags" ("name") VALUES ('A'), ('B')
        ON CONFLICT DO NOTHING RETURNING "name"
      SQL

      result = subject.insert_all(entries, returning: %i[id name])
      expect(result.squish).to be_eql(<<~SQL.squish)
        INSERT INTO "tags" ("name") VALUES ('A'), ('B')
        ON CONFLICT DO NOTHING RETURNING "id","name"
      SQL
    end

    it 'does not mess with insert_all!' do
      result = subject.insert_all!(entries)
      expect(result.squish).to be_eql(<<~SQL.squish)
        INSERT INTO "tags" ("name") VALUES ('A'), ('B') RETURNING "id"
      SQL

      result = subject.insert_all!(entries, returning: %i[name])
      expect(result.squish).to be_eql(<<~SQL.squish)
        INSERT INTO "tags" ("name") VALUES ('A'), ('B') RETURNING "name"
      SQL
    end

    it 'does not mess with upsert without where' do
      result = subject.upsert_all(entries)
      expect(result.squish).to be_eql(<<~SQL.squish)
        INSERT INTO "tags" ("name") VALUES ('A'), ('B')
        ON CONFLICT ("id") DO UPDATE SET "name"=excluded."name"
        RETURNING "id"
      SQL

      result = subject.upsert_all(entries, returning: %i[name])
      expect(result.squish).to be_eql(<<~SQL.squish)
        INSERT INTO "tags" ("name") VALUES ('A'), ('B')
        ON CONFLICT ("id") DO UPDATE SET "name"=excluded."name"
        RETURNING "name"
      SQL
    end

    it 'does add the where condition without the returning clause' do
      result = subject.upsert_all(entries, returning: false, where: '1=1')
      expect(result.squish).to be_eql(<<~SQL.squish)
        INSERT INTO "tags" ("name") VALUES ('A'), ('B')
        ON CONFLICT ("id") DO UPDATE SET "name"=excluded."name"
        WHERE 1=1
      SQL
    end

    it 'does add the where condition with the returning clause' do
      result = subject.upsert_all(entries, where: '1=1')
      expect(result.squish).to be_eql(<<~SQL.squish)
        INSERT INTO "tags" ("name") VALUES ('A'), ('B')
        ON CONFLICT ("id") DO UPDATE SET "name"=excluded."name"
        WHERE 1=1 RETURNING "id"
      SQL
    end

    # Pending (xit): relation-based where clauses are not supported yet.
    xit 'does work with model-based where clause' do
      result = subject.upsert_all(entries, where: Tag.where(name: 'C'))
      expect(result.squish).to be_eql(<<~SQL.squish)
        INSERT INTO "tags" ("name") VALUES ('A'), ('B')
        ON CONFLICT ("id") DO UPDATE SET "name"=excluded."name"
        WHERE "tags"."name" = 'C' RETURNING "id"
      SQL
    end
  end
end
================================================
FILE: spec/tests/interval_spec.rb
================================================
require 'spec_helper'
# Covers the interval column type: connection settings, table definition,
# schema dumping, the OID type (deserialize/serialize/cast), and I18n output.
RSpec.describe 'Interval' do
  let(:table_definition) { ActiveRecord::ConnectionAdapters::PostgreSQL::TableDefinition }
  let(:connection) { ActiveRecord::Base.connection }
  let(:source) { ActiveRecord::Base.connection_pool }

  context 'on settings' do
    it 'must be set to ISO 8601' do
      expect(connection.select_value('SHOW IntervalStyle')).to eql('iso_8601')
    end
  end

  context 'on table definition' do
    subject { table_definition.new(connection, 'articles') }

    it 'has the interval method' do
      expect(subject).to respond_to(:interval)
    end

    it 'can define an interval column' do
      subject.interval('duration')
      expect(subject['duration'].name).to eql('duration')
      expect(subject['duration'].type).to eql(:interval)
    end
  end

  context 'on schema' do
    it 'can be used on tables too' do
      dump_io = StringIO.new
      ActiveRecord::SchemaDumper.dump(source, dump_io)
      expect(dump_io.string).to match(/t\.interval +"duration"/)
    end
  end

  context 'on OID' do
    let(:reference) { 1.year + 2.months + 3.days + 4.hours + 5.minutes + 6.seconds }

    subject { Torque::PostgreSQL::Adapter::OID::Interval.new }

    context 'on deserialize' do
      it 'returns nil' do
        expect(subject.deserialize(nil)).to be_nil
      end

      it 'returns duration' do
        value = subject.deserialize('P1Y2M3DT4H5M6S')
        expect(value).to be_a(ActiveSupport::Duration)
        expect(value).to eq(reference)
      end
    end

    context 'on serialize' do
      it 'returns nil' do
        expect(subject.serialize(nil)).to be_nil
      end

      it 'returns seconds as string' do
        expect(subject.serialize(3600.seconds)).to eq('PT3600S')
      end

      it 'returns sample as string' do
        expect(subject.serialize(reference)).to eq('P1Y2M3DT4H5M6S')
      end

      it 'transforms weeks into days' do
        reference = subject.cast(1000000)
        expect(subject.serialize(reference)).to eq('P11DT13H46M40S')
      end
    end

    context 'on cast' do
      it 'accepts nil' do
        expect(subject.cast(nil)).to be_nil
      end

      it 'accepts string' do
        value = subject.cast('P1Y2M3DT4H5M6S')
        expect(value).to be_a(ActiveSupport::Duration)
        expect(value).to eq(reference)
      end

      it 'accepts duration' do
        value = subject.cast(5.days)
        expect(value).to be_a(ActiveSupport::Duration)
        # Was `eql(value)`, a tautology; compare against the input instead.
        expect(value).to eq(5.days)
      end

      it 'accepts small seconds numeric' do
        value = subject.cast(30)
        expect(value).to be_a(ActiveSupport::Duration)
        expect(value).to eq(30)
      end

      it 'accepts long seconds numeric' do
        value = subject.cast(reference.to_i)
        expect(value).to be_a(ActiveSupport::Duration)
        expect(value).to eq(reference)
      end

      it 'accepts array with Y-M-D H:M:S format' do
        value = subject.cast([1, 2, 3, 4, 5, 6])
        expect(value).to be_a(ActiveSupport::Duration)
        expect(value).to eq(reference)
      end

      it 'accepts array with empty values' do
        value = subject.cast([nil, 0, 12, 30, 0])
        sample = 12.hours + 30.minutes

        expect(value).to be_a(ActiveSupport::Duration)
        expect(value.inspect).to eq(sample.inspect)
        expect(value).to eq(sample)
      end

      it 'accepts array with string' do
        value = subject.cast(['45', '15'])
        sample = 45.minutes + 15.seconds

        expect(value).to be_a(ActiveSupport::Duration)
        expect(value.inspect).to eq(sample.inspect)
        expect(value).to eq(sample)
      end

      it 'accepts hash' do
        value = subject.cast({years: 1, months: 2, days: 3, hours: 4, minutes: 5, seconds: 6})
        expect(value).to be_a(ActiveSupport::Duration)
        expect(value).to eq(reference)
      end

      it 'accepts hash with extra elements' do
        value = subject.cast({extra: 1, hours: 12, minutes: 30})
        sample = 12.hours + 30.minutes

        expect(value).to be_a(ActiveSupport::Duration)
        expect(value).to eq(sample)
      end

      it 'returns any other type of value as it is' do
        value = subject.cast(true)
        expect(value).to eql(true)
      end
    end
  end

  context 'on I18n' do
    it 'transforms the value into singular text' do
      expect(I18n.l 1.year).to eql('1 year')
      expect(I18n.l 1.months).to eql('1 month')
      expect(I18n.l 1.weeks).to eql('1 week')
      expect(I18n.l 1.days).to eql('1 day')
      expect(I18n.l 1.hours).to eql('1 hour')
      expect(I18n.l 1.minutes).to eql('1 minute')
      expect(I18n.l 1.seconds).to eql('1 second')
    end

    it 'transforms the value into plural text' do
      expect(I18n.l 2.years).to eql('2 years')
      expect(I18n.l 2.months).to eql('2 months')
      expect(I18n.l 2.weeks).to eql('2 weeks')
      expect(I18n.l 2.days).to eql('2 days')
      expect(I18n.l 2.hours).to eql('2 hours')
      expect(I18n.l 2.minutes).to eql('2 minutes')
      expect(I18n.l 2.seconds).to eql('2 seconds')
    end

    it 'transforms multiple values' do
      value = 1.year + 2.months + 3.days + 4.hours + 5.minutes + 6.seconds
      expect(I18n.l value).to eql('1 year, 2 months, 3 days, 4 hours, 5 minutes, and 6 seconds')
    end
  end
end
================================================
FILE: spec/tests/lazy_spec.rb
================================================
require 'spec_helper'
# Lazy wraps a class plus its initialization arguments and only instantiates
# the underlying object when a method is called on it; until then the wrapper
# masquerades as nil.
RSpec.describe 'Lazy', type: :helper do
  subject { Torque::PostgreSQL::Attributes::Lazy }

  it 'is considered nil' do
    expect(subject.new(String, '')).to be_nil
  end

  it 'inspects as nil' do
    expect(subject.new(String, '').inspect).to be_eql('nil')
  end

  it 'compares to nil only' do
    expect(subject.new(String, '') == nil).to be_truthy
    expect(subject.new(String, '') == '').to be_falsey
    expect(subject.new(String, '') == 0).to be_falsey
  end

  it 'starts the object only on method call' do
    expect(subject.new(String, '').to_s).to be_a(String)
    expect(subject.new(String, '')).to respond_to(:chop)
  end
end
================================================
FILE: spec/tests/period_spec.rb
================================================
require 'spec_helper'
# TODO: Convert to shared examples
RSpec.describe 'Period' do
let(:model) { Class.new(TimeKeeper) }
let(:instance) { model.new }
let(:fields) { %i[available period tzperiod] }
let(:method_names) { Torque::PostgreSQL::config.period.method_names }
let(:attribute_klass) { Torque::PostgreSQL::Attributes::Period }
let(:true_value) { 'TRUE' }
let(:false_value) { 'FALSE' }
let(:klass_methods_range) { (0..22) }
let(:instance_methods_range) { (23..29) }
let(:klass_method_names) { method_names.to_a[klass_methods_range].to_h }
let(:instance_method_names) { method_names.to_a[instance_methods_range].to_h }
before { Time.zone = 'UTC' }
def decorate(model, field, options = {})
attribute_klass.include_on(model, :period_for)
model.period_for(field, **options)
end
context 'on config' do
let(:direct_method_names) do
list = method_names.dup
list.merge!(Torque::PostgreSQL::config.period.direct_method_names)
list.values.map { |v| v.gsub(/_?%s_?/, '') }
end
let(:other_method_names) do
method_names.transform_values.with_index { |_, idx| "p__#{idx}" }
end
it 'has definition method on the model' do
attribute_klass.include_on(ActiveRecord::Base, :period_for)
expect(model).to respond_to(:period_for)
ActiveRecord::Base.singleton_class.send(:undef_method, :period_for)
end
it 'create the methods with custom names' do
decorate(model, :tzperiod, threshold: 5.minutes, methods: other_method_names)
klass_method_names.size.times do |i|
expect(model).to respond_to("p__#{i}")
end
initial = instance_methods_range.min
instance_method_names.size.times do |i|
expect(instance).to respond_to("p__#{initial + i}")
end
end
it 'creates non prefixed methods if requested' do
decorate(model, :tzperiod, prefixed: false, threshold: 5.minutes)
direct_method_names[klass_methods_range].each do |m|
expect(model).to respond_to(m)
end
direct_method_names[instance_methods_range].each do |m|
expect(instance).to respond_to(m)
end
end
end
context 'on tsrange' do
let(:type) { :tsrange }
let(:value) { Time.zone.now.beginning_of_minute }
let(:db_field) { '"time_keepers"."period"' }
let(:db_value) { "'#{value.strftime('%F %T')}'" }
let(:cast_type) { '::timestamp' }
let(:cast_db_value) { "#{db_value}#{cast_type}" }
let(:empty_condition) { "#{type.to_s.upcase}(NULL, NULL)" }
let(:nullif_condition) { "NULLIF(#{db_field}, #{empty_condition})" }
let(:date_type) { :daterange }
let(:lower_date) { "LOWER(#{db_field})::date" }
let(:upper_date) { "UPPER(#{db_field})::date" }
let(:date_db_field) { "#{date_type.to_s.upcase}(#{lower_date}, #{upper_date}, '[]')" }
context 'on model' do
before { decorate(model, :period) }
it 'queries current on period' do
expect(model.period_on(value).to_sql).to include(<<-SQL.squish)
COALESCE(#{nullif_condition} @> #{cast_db_value}, #{true_value})
SQL
end
it 'queries current period' do
expect(model.current_period.to_sql).to include(<<-SQL.squish)
COALESCE(#{nullif_condition} @>
SQL
expect(model.current_period.to_sql).to include(<<-SQL.squish)
#{cast_type}, #{true_value})
SQL
end
it 'queries not current period' do
expect(model.not_current_period.to_sql).to include(<<-SQL.squish)
NOT (COALESCE(#{nullif_condition} @>
SQL
expect(model.not_current_period.to_sql).to include(<<-SQL.squish)
#{cast_type}, #{true_value})
SQL
end
it 'queries containing period' do
expect(model.period_containing(:test).to_sql).to include(<<-SQL.squish)
#{db_field} @> "time_keepers"."test"
SQL
expect(model.period_containing(value).to_sql).to include(<<-SQL.squish)
#{db_field} @> #{db_value}
SQL
end
it 'queries not containing period' do
expect(model.period_not_containing(:test).to_sql).to include(<<-SQL.squish)
NOT (#{db_field} @> "time_keepers"."test")
SQL
expect(model.period_not_containing(value).to_sql).to include(<<-SQL.squish)
NOT (#{db_field} @> #{db_value})
SQL
end
it 'queries overlapping period' do
expect(model.period_overlapping(:test).to_sql).to include(<<-SQL.squish)
#{db_field} && "time_keepers"."test"
SQL
expect(model.period_overlapping(value, value).to_sql).to include(<<-SQL.squish)
#{db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value})
SQL
end
it 'queries not overlapping period' do
expect(model.period_not_overlapping(:test).to_sql).to include(<<-SQL.squish)
NOT (#{db_field} && "time_keepers"."test")
SQL
expect(model.period_not_overlapping(value, value).to_sql).to include(<<-SQL.squish)
NOT (#{db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value}))
SQL
end
it 'queries starting after period' do
expect(model.period_starting_after(:test).to_sql).to include(<<-SQL.squish)
LOWER(#{db_field}) > "time_keepers"."test"
SQL
expect(model.period_starting_after(value).to_sql).to include(<<-SQL.squish)
LOWER(#{db_field}) > #{db_value}
SQL
end
it 'queries starting before period' do
expect(model.period_starting_before(:test).to_sql).to include(<<-SQL.squish)
LOWER(#{db_field}) < "time_keepers"."test"
SQL
expect(model.period_starting_before(value).to_sql).to include(<<-SQL.squish)
LOWER(#{db_field}) < #{db_value}
SQL
end
it 'queries finishing after period' do
expect(model.period_finishing_after(:test).to_sql).to include(<<-SQL.squish)
UPPER(#{db_field}) > "time_keepers"."test"
SQL
expect(model.period_finishing_after(value).to_sql).to include(<<-SQL.squish)
UPPER(#{db_field}) > #{db_value}
SQL
end
it 'queries finishing before period' do
expect(model.period_finishing_before(:test).to_sql).to include(<<-SQL.squish)
UPPER(#{db_field}) < "time_keepers"."test"
SQL
expect(model.period_finishing_before(value).to_sql).to include(<<-SQL.squish)
UPPER(#{db_field}) < #{db_value}
SQL
end
it 'does not have real starting after for period' do
expect(model.all).not_to respond_to(:real_starting_after)
end
it 'does not have real starting before for period' do
expect(model.all).not_to respond_to(:real_starting_before)
end
it 'does not have real finishing after for period' do
expect(model.all).not_to respond_to(:real_finishing_after)
end
it 'does not have real finishing before for period' do
expect(model.all).not_to respond_to(:real_finishing_before)
end
it 'queries containing date period' do
expect(model.period_containing_date(:test).to_sql).to include(<<-SQL.squish)
#{date_db_field} @> "time_keepers"."test"
SQL
expect(model.period_containing_date(value).to_sql).to include(<<-SQL.squish)
#{date_db_field} @> #{db_value}::date
SQL
end
it 'queries not containing date period' do
expect(model.period_not_containing_date(:test).to_sql).to include(<<-SQL.squish)
NOT (#{date_db_field} @> "time_keepers"."test")
SQL
expect(model.period_not_containing_date(value).to_sql).to include(<<-SQL.squish)
NOT (#{date_db_field} @> #{db_value}::date)
SQL
end
it 'queries overlapping date period' do
expect(model.period_overlapping_date(:test).to_sql).to include(<<-SQL.squish)
#{date_db_field} && "time_keepers"."test"
SQL
expect(model.period_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)
#{date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date)
SQL
end
it 'queries not overlapping date period' do
expect(model.period_not_overlapping_date(:test).to_sql).to include(<<-SQL.squish)
NOT (#{date_db_field} && "time_keepers"."test")
SQL
expect(model.period_not_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)
NOT (#{date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date))
SQL
end
it 'does not have real containing date period' do
expect(model.all).not_to respond_to(:period_real_containing_date)
end
it 'does not have real overlapping date period' do
expect(model.all).not_to respond_to(:period_real_overlapping_date)
end
end
context 'on instance' do
before { decorate(model, :period) }
it 'checks for current value' do
instance.period = 1.hour.ago.utc..1.hour.from_now.utc
expect(instance).to be_current_period
instance.period = 4.hour.from_now.utc..6.hour.from_now.utc
expect(instance).not_to be_current_period
instance.period = [nil, 4.hours.ago.utc]
expect(instance).not_to be_current_period
instance.period = [4.hours.from_now.utc, nil]
expect(instance).not_to be_current_period
instance.period = [nil, nil]
expect(instance).to be_current_period
end
# Verifies current_period_on? against an explicit reference time.
it 'checks for current based on a value' do
  instance.period = 1.hour.ago.utc..1.hour.from_now.utc
  expect(instance).to be_current_period_on(5.minutes.from_now.utc)

  instance.period = 4.hours.from_now.utc..6.hours.from_now.utc
  expect(instance).not_to be_current_period_on(5.minutes.from_now.utc)
end
it 'returns the start time' do
instance.period = 1.hour.ago.utc..1.hour.from_now.utc
expect(instance.period_start).to be_eql(instance.period.min)
instance.period = 4.hour.from_now.utc..6.hour.from_now.utc
expect(instance.period_start).to be_eql(instance.period.min)
end
it 'returns the finish time' do
instance.period = 1.hour.ago.utc..1.hour.from_now.utc
expect(instance.period_finish).to be_eql(instance.period.max)
instance.period = 4.hour.from_now.utc..6.hour.from_now.utc
expect(instance.period_finish).to be_eql(instance.period.max)
end
end
context 'with field threshold' do
before { decorate(model, :period, threshold: :th) }
let(:lower_db_field) { "(LOWER(#{db_field}) - #{threshold_value})" }
let(:upper_db_field) { "(UPPER(#{db_field}) + #{threshold_value})" }
let(:threshold_value) { '"time_keepers"."th"' }
let(:threshold_db_field) { "#{type.to_s.upcase}(#{lower_db_field}, #{upper_db_field})" }
let(:nullif_condition) { "NULLIF(#{threshold_db_field}, #{empty_condition})" }
let(:threshold_date_db_field) do
"DATERANGE(#{lower_db_field}::date, #{upper_db_field}::date, '[]')"
end
# Scope-level assertions for a period attribute whose threshold is another
# column ("time_keepers"."th"). Each example builds the expected SQL fragment
# from the let() helpers and checks that it appears in the squished query
# generated by the corresponding scope.
context 'on model' do
# COALESCE(NULLIF(range, empty) @> value, TRUE): optimistic fallback when the
# range is NULL or empty.
it 'queries current on period' do
expect(model.period_on(value).to_sql).to include(<<-SQL.squish)
COALESCE(#{nullif_condition} @> #{cast_db_value}, #{true_value})
SQL
end
# Two partial matches because the text between them (presumably the
# current-time literal/bind) is not deterministic — TODO confirm.
it 'queries current period' do
expect(model.current_period.to_sql).to include(<<-SQL.squish)
COALESCE(#{nullif_condition} @>
SQL
expect(model.current_period.to_sql).to include(<<-SQL.squish)
#{cast_type}, #{true_value})
SQL
end
it 'queries not current period' do
expect(model.not_current_period.to_sql).to include(<<-SQL.squish)
NOT (COALESCE(#{nullif_condition} @>
SQL
expect(model.not_current_period.to_sql).to include(<<-SQL.squish)
#{cast_type}, #{true_value})
SQL
end
# "Real" scopes operate on the threshold-widened range. Each accepts either a
# column name (symbol) or a literal value.
it 'queries real containing period' do
expect(model.period_real_containing(:test).to_sql).to include(<<-SQL.squish)
#{threshold_db_field} @> "time_keepers"."test"
SQL
expect(model.period_real_containing(value).to_sql).to include(<<-SQL.squish)
#{threshold_db_field} @> #{db_value}
SQL
end
it 'queries real overlapping period' do
expect(model.period_real_overlapping(:test).to_sql).to include(<<-SQL.squish)
#{threshold_db_field} && "time_keepers"."test"
SQL
expect(model.period_real_overlapping(value, value).to_sql).to include(<<-SQL.squish)
#{threshold_db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value})
SQL
end
it 'queries real starting after for period' do
expect(model.period_real_starting_after(:test).to_sql).to include(<<-SQL.squish)
#{lower_db_field} > "time_keepers"."test"
SQL
expect(model.period_real_starting_after(value).to_sql).to include(<<-SQL.squish)
#{lower_db_field} > #{db_value}
SQL
end
it 'queries real starting before for period' do
expect(model.period_real_starting_before(:test).to_sql).to include(<<-SQL.squish)
#{lower_db_field} < "time_keepers"."test"
SQL
expect(model.period_real_starting_before(value).to_sql).to include(<<-SQL.squish)
#{lower_db_field} < #{db_value}
SQL
end
it 'queries real finishing after for period' do
expect(model.period_real_finishing_after(:test).to_sql).to include(<<-SQL.squish)
#{upper_db_field} > "time_keepers"."test"
SQL
expect(model.period_real_finishing_after(value).to_sql).to include(<<-SQL.squish)
#{upper_db_field} > #{db_value}
SQL
end
it 'queries real finishing before for period' do
expect(model.period_real_finishing_before(:test).to_sql).to include(<<-SQL.squish)
#{upper_db_field} < "time_keepers"."test"
SQL
expect(model.period_real_finishing_before(value).to_sql).to include(<<-SQL.squish)
#{upper_db_field} < #{db_value}
SQL
end
# Date-based scopes cast the period bounds (and the value) to date.
it 'queries containing date period' do
expect(model.period_containing_date(:test).to_sql).to include(<<-SQL.squish)
#{date_db_field} @> "time_keepers"."test"
SQL
expect(model.period_containing_date(value).to_sql).to include(<<-SQL.squish)
#{date_db_field} @> #{db_value}::date
SQL
end
it 'queries not containing date period' do
expect(model.period_not_containing_date(:test).to_sql).to include(<<-SQL.squish)
NOT (#{date_db_field} @> "time_keepers"."test")
SQL
expect(model.period_not_containing_date(value).to_sql).to include(<<-SQL.squish)
NOT (#{date_db_field} @> #{db_value}::date)
SQL
end
it 'queries overlapping date period' do
expect(model.period_overlapping_date(:test).to_sql).to include(<<-SQL.squish)
#{date_db_field} && "time_keepers"."test"
SQL
expect(model.period_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)
#{date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date)
SQL
end
it 'queries not overlapping date period' do
expect(model.period_not_overlapping_date(:test).to_sql).to include(<<-SQL.squish)
NOT (#{date_db_field} && "time_keepers"."test")
SQL
expect(model.period_not_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)
NOT (#{date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date))
SQL
end
# Combination of "real" (threshold-widened) and date-casted variants.
it 'queries real containing date period' do
expect(model.period_real_containing_date(:test).to_sql).to include(<<-SQL.squish)
#{threshold_date_db_field} @> "time_keepers"."test"
SQL
expect(model.period_real_containing_date(value).to_sql).to include(<<-SQL.squish)
#{threshold_date_db_field} @> #{db_value}::date
SQL
end
it 'queries real overlapping date period' do
expect(model.period_real_overlapping_date(:test).to_sql).to include(<<-SQL.squish)
#{threshold_date_db_field} && "time_keepers"."test"
SQL
expect(model.period_real_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)
#{threshold_date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date)
SQL
end
end
# Instance-level helpers when the threshold comes from the "th" column; the
# record's own th value (1 hour) widens the period on both sides.
context 'on instance' do
before { decorate(model, :period, threshold: :th) }
before { instance.th = 1.hour }
it 'checks for current value' do
# A nil period counts as current (optimistic default).
instance.period = nil
expect(instance).to be_current_period
instance.period = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)
expect(instance).to be_current_period
# Starts 90 minutes from now; even widened by the 1-hour threshold it does
# not reach the present, so it is not current.
instance.period = (Time.zone.now + 90.minutes)..(Time.zone.now + 3.hour)
expect(instance).not_to be_current_period
end
it 'checks for current based on a value' do
instance.period = nil
expect(instance).to be_current_period_on(5.minutes.from_now.utc)
instance.period = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)
expect(instance).to be_current_period_on(5.minutes.from_now.utc)
instance.period = 90.minutes.from_now.utc..3.hour.from_now.utc
expect(instance).not_to be_current_period_on(5.minutes.from_now.utc)
end
# real_* accessors expose the threshold-widened bounds.
it 'returns the real range' do
value = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)
instance.period = value
expect(instance.real_period.min).to be_eql(value.min - 1.hour)
expect(instance.real_period.max).to be_eql(value.max + 1.hour)
end
it 'returns the real start' do
value = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)
instance.period = value
expect(instance.period_real_start).to be_eql(value.min - 1.hour)
end
it 'returns the real finish' do
value = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)
instance.period = value
expect(instance.period_real_finish).to be_eql(value.max + 1.hour)
end
end
end
context 'with value threshold' do
# Same scope assertions, but with a fixed interval threshold instead of a
# column: 5 minutes is rendered as a '300 seconds' interval literal.
before { decorate(model, :period, threshold: 5.minutes) }
let(:lower_db_field) { "(LOWER(#{db_field}) - #{threshold_value})" }
let(:upper_db_field) { "(UPPER(#{db_field}) + #{threshold_value})" }
let(:threshold_value) { "'300 seconds'::interval" }
let(:threshold_db_field) { "#{type.to_s.upcase}(#{lower_db_field}, #{upper_db_field})" }
let(:nullif_condition) { "NULLIF(#{threshold_db_field}, #{empty_condition})" }
# Mirrors the column-threshold 'on model' context above, with the interval
# literal threshold baked into the expected fragments.
context 'on model' do
it 'queries current on period' do
expect(model.period_on(value).to_sql).to include(<<-SQL.squish)
COALESCE(#{nullif_condition} @> #{cast_db_value}, #{true_value})
SQL
end
# Two partial matches around the non-deterministic "now" portion.
it 'queries current period' do
expect(model.current_period.to_sql).to include(<<-SQL.squish)
COALESCE(#{nullif_condition} @>
SQL
expect(model.current_period.to_sql).to include(<<-SQL.squish)
#{cast_type}, #{true_value})
SQL
end
it 'queries not current period' do
expect(model.not_current_period.to_sql).to include(<<-SQL.squish)
NOT (COALESCE(#{nullif_condition} @>
SQL
expect(model.not_current_period.to_sql).to include(<<-SQL.squish)
#{cast_type}, #{true_value})
SQL
end
it 'queries real containing period' do
expect(model.period_real_containing(:test).to_sql).to include(<<-SQL.squish)
#{threshold_db_field} @> "time_keepers"."test"
SQL
expect(model.period_real_containing(value).to_sql).to include(<<-SQL.squish)
#{threshold_db_field} @> #{db_value}
SQL
end
it 'queries real overlapping period' do
expect(model.period_real_overlapping(:test).to_sql).to include(<<-SQL.squish)
#{threshold_db_field} && "time_keepers"."test"
SQL
expect(model.period_real_overlapping(value, value).to_sql).to include(<<-SQL.squish)
#{threshold_db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value})
SQL
end
it 'queries real starting after for period' do
expect(model.period_real_starting_after(:test).to_sql).to include(<<-SQL.squish)
#{lower_db_field} > "time_keepers"."test"
SQL
expect(model.period_real_starting_after(value).to_sql).to include(<<-SQL.squish)
#{lower_db_field} > #{db_value}
SQL
end
it 'queries real starting before for period' do
expect(model.period_real_starting_before(:test).to_sql).to include(<<-SQL.squish)
#{lower_db_field} < "time_keepers"."test"
SQL
expect(model.period_real_starting_before(value).to_sql).to include(<<-SQL.squish)
#{lower_db_field} < #{db_value}
SQL
end
it 'queries real finishing after for period' do
expect(model.period_real_finishing_after(:test).to_sql).to include(<<-SQL.squish)
#{upper_db_field} > "time_keepers"."test"
SQL
expect(model.period_real_finishing_after(value).to_sql).to include(<<-SQL.squish)
#{upper_db_field} > #{db_value}
SQL
end
it 'queries real finishing before for period' do
expect(model.period_real_finishing_before(:test).to_sql).to include(<<-SQL.squish)
#{upper_db_field} < "time_keepers"."test"
SQL
expect(model.period_real_finishing_before(value).to_sql).to include(<<-SQL.squish)
#{upper_db_field} < #{db_value}
SQL
end
it 'queries containing date period' do
expect(model.period_containing_date(:test).to_sql).to include(<<-SQL.squish)
#{date_db_field} @> "time_keepers"."test"
SQL
# NOTE(review): unlike the column-threshold context, there is no ::date cast
# on the value here. Since `include` matches a prefix of the full predicate,
# this assertion passes either way — it is just weaker; confirm which form
# the builder actually emits.
expect(model.period_containing_date(value).to_sql).to include(<<-SQL.squish)
#{date_db_field} @> #{db_value}
SQL
end
it 'queries not containing date period' do
expect(model.period_not_containing_date(:test).to_sql).to include(<<-SQL.squish)
NOT (#{date_db_field} @> "time_keepers"."test")
SQL
expect(model.period_not_containing_date(value).to_sql).to include(<<-SQL.squish)
NOT (#{date_db_field} @> #{db_value}::date)
SQL
end
it 'queries overlapping date period' do
expect(model.period_overlapping_date(:test).to_sql).to include(<<-SQL.squish)
#{date_db_field} && "time_keepers"."test"
SQL
expect(model.period_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)
#{date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date)
SQL
end
it 'queries not overlapping date period' do
expect(model.period_not_overlapping_date(:test).to_sql).to include(<<-SQL.squish)
NOT (#{date_db_field} && "time_keepers"."test")
SQL
expect(model.period_not_overlapping_date(value, value).to_sql).to include(<<-SQL.squish)
NOT (#{date_db_field} && #{date_type.to_s.upcase}(#{db_value}::date, #{db_value}::date))
SQL
end
end
# Instance-level helpers with a fixed value threshold; the inner before
# re-decorates with 45 minutes, overriding the context-level 5 minutes.
context 'on instance' do
before { decorate(model, :period, threshold: 45.minutes) }
it 'checks for current value' do
# nil period counts as current (optimistic default).
instance.period = nil
expect(instance).to be_current_period
instance.period = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)
expect(instance).to be_current_period
# Starts 90 minutes out; the 45-minute widening still leaves it in the future.
instance.period = (Time.zone.now + 90.minutes)..(Time.zone.now + 3.hour)
expect(instance).not_to be_current_period
end
it 'checks for current based on a value' do
instance.period = nil
expect(instance).to be_current_period_on(5.minutes.from_now.utc)
instance.period = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)
expect(instance).to be_current_period_on(5.minutes.from_now.utc)
instance.period = 90.minutes.from_now.utc..3.hour.from_now.utc
expect(instance).not_to be_current_period_on(5.minutes.from_now.utc)
end
# real_* accessors reflect the 45-minute widening on both sides.
it 'returns the real range' do
value = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)
instance.period = value
expect(instance.real_period.min).to be_eql(value.min - 45.minutes)
expect(instance.real_period.max).to be_eql(value.max + 45.minutes)
end
it 'returns the real start' do
value = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)
instance.period = value
expect(instance.period_real_start).to be_eql(value.min - 45.minutes)
end
it 'returns the real finish' do
value = (Time.zone.now - 1.hour)..(Time.zone.now + 1.hour)
instance.period = value
expect(instance.period_real_finish).to be_eql(value.max + 45.minutes)
end
end
end
end
context 'on daterange' do
# daterange variant: a date-typed period ("available") decorated as
# pessimistic (nil/empty falls back to FALSE, see false_value usage below)
# with a 1-day threshold rendered as an '86400 seconds' interval.
let(:type) { :daterange }
let(:value) { Date.today }
let(:db_field) { '"time_keepers"."available"' }
let(:db_value) { "'#{value.strftime('%F')}'" }
let(:cast_type) { '::date' }
let(:cast_db_value) { "#{db_value}#{cast_type}" }
let(:empty_condition) { "#{type.to_s.upcase}(NULL, NULL)" }
let(:nullif_condition) { "NULLIF(#{threshold_db_field}, #{empty_condition})" }
# Widened bounds are cast back to date after applying the interval threshold.
let(:lower_db_field) { "(LOWER(#{db_field}) - #{threshold_value})::date" }
let(:upper_db_field) { "(UPPER(#{db_field}) + #{threshold_value})::date" }
let(:threshold_value) { "'86400 seconds'::interval" }
let(:threshold_db_field) { "#{type.to_s.upcase}(#{lower_db_field}, #{upper_db_field})" }
before { decorate(model, :available, pessimistic: true, threshold: 1.day) }
# Scope assertions for the date-typed "available" period. Pessimistic mode
# means COALESCE falls back to FALSE instead of TRUE when the range is
# NULL/empty.
context 'on model' do
it 'queries current on available' do
expect(model.available_on(value).to_sql).to include(<<-SQL.squish)
COALESCE(#{nullif_condition} @> #{cast_db_value}, #{false_value})
SQL
end
# Two partial matches around the non-deterministic "today" portion.
it 'queries current available' do
expect(model.current_available.to_sql).to include(<<-SQL.squish)
COALESCE(#{nullif_condition} @>
SQL
expect(model.current_available.to_sql).to include(<<-SQL.squish)
#{cast_type}, #{false_value})
SQL
end
it 'queries not current available' do
expect(model.not_current_available.to_sql).to include(<<-SQL.squish)
NOT (COALESCE(#{nullif_condition} @>
SQL
expect(model.not_current_available.to_sql).to include(<<-SQL.squish)
#{cast_type}, #{false_value})
SQL
end
# Plain (non-"real") scopes operate on the raw column.
it 'queries containing available' do
expect(model.available_containing(:test).to_sql).to include(<<-SQL.squish)
#{db_field} @> "time_keepers"."test"
SQL
expect(model.available_containing(value).to_sql).to include(<<-SQL.squish)
#{db_field} @> #{db_value}
SQL
end
it 'queries not containing available' do
expect(model.available_not_containing(:test).to_sql).to include(<<-SQL.squish)
NOT (#{db_field} @> "time_keepers"."test")
SQL
expect(model.available_not_containing(value).to_sql).to include(<<-SQL.squish)
NOT (#{db_field} @> #{db_value})
SQL
end
it 'queries overlapping available' do
expect(model.available_overlapping(:test).to_sql).to include(<<-SQL.squish)
#{db_field} && "time_keepers"."test"
SQL
expect(model.available_overlapping(value, value).to_sql).to include(<<-SQL.squish)
#{db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value})
SQL
end
it 'queries not overlapping available' do
expect(model.available_not_overlapping(:test).to_sql).to include(<<-SQL.squish)
NOT (#{db_field} && "time_keepers"."test")
SQL
expect(model.available_not_overlapping(value, value).to_sql).to include(<<-SQL.squish)
NOT (#{db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value}))
SQL
end
it 'queries starting after available' do
expect(model.available_starting_after(:test).to_sql).to include(<<-SQL.squish)
LOWER(#{db_field}) > "time_keepers"."test"
SQL
expect(model.available_starting_after(value).to_sql).to include(<<-SQL.squish)
LOWER(#{db_field}) > #{db_value}
SQL
end
it 'queries starting before available' do
expect(model.available_starting_before(:test).to_sql).to include(<<-SQL.squish)
LOWER(#{db_field}) < "time_keepers"."test"
SQL
expect(model.available_starting_before(value).to_sql).to include(<<-SQL.squish)
LOWER(#{db_field}) < #{db_value}
SQL
end
it 'queries finishing after available' do
expect(model.available_finishing_after(:test).to_sql).to include(<<-SQL.squish)
UPPER(#{db_field}) > "time_keepers"."test"
SQL
expect(model.available_finishing_after(value).to_sql).to include(<<-SQL.squish)
UPPER(#{db_field}) > #{db_value}
SQL
end
it 'queries finishing before available' do
expect(model.available_finishing_before(:test).to_sql).to include(<<-SQL.squish)
UPPER(#{db_field}) < "time_keepers"."test"
SQL
expect(model.available_finishing_before(value).to_sql).to include(<<-SQL.squish)
UPPER(#{db_field}) < #{db_value}
SQL
end
# "Real" scopes operate on the threshold-widened (1 day) range.
it 'queries real containing available' do
expect(model.available_real_containing(:test).to_sql).to include(<<-SQL.squish)
#{threshold_db_field} @> "time_keepers"."test"
SQL
expect(model.available_real_containing(value).to_sql).to include(<<-SQL.squish)
#{threshold_db_field} @> #{db_value}
SQL
end
it 'queries real overlapping available' do
expect(model.available_real_overlapping(:test).to_sql).to include(<<-SQL.squish)
#{threshold_db_field} && "time_keepers"."test"
SQL
expect(model.available_real_overlapping(value, value).to_sql).to include(<<-SQL.squish)
#{threshold_db_field} && #{type.to_s.upcase}(#{db_value}, #{db_value})
SQL
end
it 'queries real starting after for available' do
expect(model.available_real_starting_after(:test).to_sql).to include(<<-SQL.squish)
#{lower_db_field} > "time_keepers"."test"
SQL
expect(model.available_real_starting_after(value).to_sql).to include(<<-SQL.squish)
#{lower_db_field} > #{db_value}
SQL
end
it 'queries real starting before for available' do
expect(model.available_real_starting_before(:test).to_sql).to include(<<-SQL.squish)
#{lower_db_field} < "time_keepers"."test"
SQL
expect(model.available_real_starting_before(value).to_sql).to include(<<-SQL.squish)
#{lower_db_field} < #{db_value}
SQL
end
it 'queries real finishing after for available' do
expect(model.available_real_finishing_after(:test).to_sql).to include(<<-SQL.squish)
#{upper_db_field} > "time_keepers"."test"
SQL
expect(model.available_real_finishing_after(value).to_sql).to include(<<-SQL.squish)
#{upper_db_field} > #{db_value}
SQL
end
it 'queries real finishing before for available' do
expect(model.available_real_finishing_before(:test).to_sql).to include(<<-SQL.squish)
#{upper_db_field} < "time_keepers"."test"
SQL
expect(model.available_real_finishing_before(value).to_sql).to include(<<-SQL.squish)
#{upper_db_field} < #{db_value}
SQL
end
# The *_date scope variants are redundant for a column that is already a
# daterange, so they must not be generated at all.
it 'does not query containing date available' do
expect(model.all).not_to respond_to(:available_containing_date)
end
it 'does not query not containing date available' do
expect(model.all).not_to respond_to(:available_not_containing_date)
end
it 'does not query overlapping date available' do
expect(model.all).not_to respond_to(:available_overlapping_date)
end
it 'does not query not overlapping date available' do
expect(model.all).not_to respond_to(:available_not_overlapping_date)
end
it 'does not query real containing date available' do
expect(model.all).not_to respond_to(:available_real_containing_date)
end
it 'does not query real overlapping date available' do
expect(model.all).not_to respond_to(:available_real_overlapping_date)
end
end
# Instance-level helpers for the pessimistic, 1-day-threshold date period:
# a nil range is NOT current, and real_* accessors widen the bounds by a day.
context 'on instance' do
it 'checks for current value' do
instance.available = nil
expect(instance).not_to be_current_available
instance.available = Date.yesterday..Date.tomorrow
expect(instance).to be_current_available
# Date.new with no arguments is the Julian epoch (-4712-01-01): an ancient
# range that can never be current.
instance.available = Date.new.prev_month..Date.new.next_month
expect(instance).not_to be_current_available
end
# Description typo fixed: "checks fro" -> "checks for".
it 'checks for current based on a value' do
instance.available = nil
expect(instance).not_to be_current_available_on(Date.tomorrow)
instance.available = Date.yesterday..Date.tomorrow
expect(instance).to be_current_available_on(Date.tomorrow)
instance.available = Date.new.prev_month..Date.new.next_month
expect(instance).to be_current_available_on(Date.new.next_month)
end
it 'returns the start date' do
instance.available = Date.yesterday..Date.tomorrow
expect(instance.available_start).to be_eql(instance.available.min)
instance.available = Date.new.prev_month..Date.new.next_month
expect(instance.available_start).to be_eql(instance.available.min)
end
it 'returns the finish date' do
instance.available = Date.yesterday..Date.tomorrow
expect(instance.available_finish).to be_eql(instance.available.max)
instance.available = Date.new.prev_month..Date.new.next_month
expect(instance.available_finish).to be_eql(instance.available.max)
end
it 'returns the real range' do
value = Date.yesterday..Date.tomorrow
instance.available = value
expect(instance.real_available.min).to be_eql(value.min.prev_day)
expect(instance.real_available.max).to be_eql(value.max.next_day)
end
it 'returns the real start date' do
instance.available = Date.yesterday..Date.tomorrow
expect(instance.available_real_start).to be_eql(instance.available.min.prev_day)
end
it 'returns the real finish date' do
instance.available = Date.yesterday..Date.tomorrow
expect(instance.available_real_finish).to be_eql(instance.available.max.next_day)
end
end
end
end
================================================
FILE: spec/tests/predicate_builder_spec.rb
================================================
require 'spec_helper'
RSpec.describe 'PredicateBuilder' do
# Lazy enumerators passed to where() must be materialized safely: a timeout
# and a size limit guard against infinite enumerators.
describe 'on enumerator lazy' do
let(:timed_out_error) do
Torque::PostgreSQL::PredicateBuilder::EnumeratorLazyHandler::Timeout
end
subject { Video.all }
# Restore the library defaults after each example so the overrides in the
# examples below do not leak into other specs.
after do
Torque::PostgreSQL.config.predicate_builder.lazy_timeout = 0.02
Torque::PostgreSQL.config.predicate_builder.lazy_limit = 2_000
end
it 'works with provided value' do
sql = subject.where(id: [1,2,3].lazy).to_sql
expect(sql).to include("WHERE \"videos\".\"id\" IN (1, 2, 3)")
end
# Infinite enumerator with no limit: only the timeout can stop it.
it 'handles gracefully a timeout' do
Torque::PostgreSQL.config.predicate_builder.lazy_timeout = 0.01
Torque::PostgreSQL.config.predicate_builder.lazy_limit = nil
expect { subject.where(id: (1..).lazy).to_sql }.to raise_error(timed_out_error)
end
# Limit truncates the materialized values instead of raising.
it 'handles properly a limit' do
Torque::PostgreSQL.config.predicate_builder.lazy_timeout = nil
Torque::PostgreSQL.config.predicate_builder.lazy_limit = 2
sql = subject.where(id: [1,2,3].lazy).to_sql
expect(sql).to include("WHERE \"videos\".\"id\" IN (1, 2)")
end
end
# Arel attributes on either side of a where() condition: array-typed columns
# switch the operator to ANY(...) or the && overlap operator as appropriate.
describe 'on arel attribute' do
subject { Item.all }
it 'works with both plain attributes' do
sql = subject.where(id: Item.arel_table[:id]).to_sql
expect(sql).to include("WHERE \"items\".\"id\" = \"items\".\"id\"")
end
# Description typo fixed: duplicated "when" removed.
it 'works when the left side is an array' do
sql = subject.where(tag_ids: Item.arel_table[:id]).to_sql
expect(sql).to include("WHERE \"items\".\"id\" = ANY(\"items\".\"tag_ids\")")
end
it 'works when the right side is an array' do
sql = subject.where(id: Item.arel_table[:tag_ids]).to_sql
expect(sql).to include("WHERE \"items\".\"id\" = ANY(\"items\".\"tag_ids\")")
end
# Array vs array uses the overlap operator.
it 'works when both are arrays' do
sql = subject.where(tag_ids: Item.arel_table[:tag_ids]).to_sql
expect(sql).to include("WHERE \"items\".\"tag_ids\" && \"items\".\"tag_ids\"")
end
end
# Behavior of where() against array-typed columns, gated by the
# handle_array_attributes config flag (enabled for this group only).
describe 'on array' do
subject { Item.all }
before { Torque::PostgreSQL.config.predicate_builder.handle_array_attributes = true }
after { Torque::PostgreSQL.config.predicate_builder.handle_array_attributes = false }
# With the flag off, Rails' default equality semantics apply.
it 'works with plain array when disabled' do
Torque::PostgreSQL.config.predicate_builder.handle_array_attributes = false
sql = subject.where(tag_ids: 1).to_sql
expect(sql).to include("WHERE \"items\".\"tag_ids\" = 1")
sql = subject.where(tag_ids: [1, 2, 3]).to_sql
expect(sql).to include("WHERE \"items\".\"tag_ids\" = '{1,2,3}'")
end
# Scalar value: membership via = ANY(column).
it 'works with a single value' do
sql = subject.where(tag_ids: 1).to_sql
expect(sql).to include("WHERE 1 = ANY(\"items\".\"tag_ids\")")
end
# Array value: overlap via &&.
it 'works with an array value' do
sql = subject.where(tag_ids: [1, 2, 3]).to_sql
expect(sql).to include("WHERE \"items\".\"tag_ids\" && '{1,2,3}'")
end
# Empty array: matched as an empty column, not an impossible condition.
it 'works with an empty array' do
sql = subject.where(tag_ids: []).to_sql
expect(sql).to include("WHERE CARDINALITY(\"items\".\"tag_ids\") = 0")
end
# Same three shapes, but executed so bind parameters can be inspected.
it 'properly binds the provided values' do
sql, binds = get_query_with_binds { subject.where(tag_ids: 1).load }
expect(sql).to include("WHERE $1 = ANY(\"items\".\"tag_ids\")")
expect(binds.first.value).to eq(1)
sql, binds = get_query_with_binds { subject.where(tag_ids: [1, 2, 3]).load }
expect(sql).to include("WHERE \"items\".\"tag_ids\" && $1")
expect(binds.first.value).to eq([1, 2, 3])
sql, binds = get_query_with_binds { subject.where(tag_ids: []).load }
expect(sql).to include("WHERE CARDINALITY(\"items\".\"tag_ids\") = 0")
expect(binds).to be_empty
end
end
# Ruby Regexp values in where() map to PostgreSQL's ~ / ~* operators; the
# pattern source is quoted (and escaped) as a string literal or bound.
describe 'on regexp' do
subject { Video.all }
it 'works with a basic regular expression' do
sql = subject.where(title: /(a|b)/).to_sql
expect(sql).to include("WHERE \"videos\".\"title\" ~ '(a|b)'")
end
# The /i flag switches to the case-insensitive ~* operator.
it 'works with a case-insensitive regular expression' do
sql = subject.where(title: /(a|b)/i).to_sql
expect(sql).to include("WHERE \"videos\".\"title\" ~* '(a|b)'")
end
# Quotes and backslashes in the pattern must be escaped for the SQL literal.
it 'works with characters that need escape' do
sql = subject.where(title: %r{a|'|"|\\}).to_sql
expect(sql).to include("WHERE \"videos\".\"title\" ~ 'a|''|\"|\\\\'")
end
it 'properly binds the provided value' do
query = subject.where(title: /(a|b)/)
sql, binds = get_query_with_binds { query.load }
expect(sql).to include("WHERE \"videos\".\"title\" ~ $1")
expect(binds.first.value).to eq('(a|b)')
end
end
end
================================================
FILE: spec/tests/quoting_spec.rb
================================================
require 'spec_helper'

# Covers the adapter's quote_type_name helper: how a type name, an optional
# embedded schema, and an explicit schema parameter are resolved and quoted.
RSpec.describe 'Quoting', type: :helper do
  let(:connection) { ActiveRecord::Base.connection }

  context 'on type names' do
    it 'accepts type name only' do
      # Without any schema information, "public" is assumed.
      quoted = connection.quote_type_name('sample')
      expect(quoted).to eql('"public"."sample"')
    end

    it 'accepts schema and type name' do
      quoted = connection.quote_type_name('other.sample')
      expect(quoted).to eql('"other"."sample"')
    end

    it 'accepts schema as a parameter' do
      quoted = connection.quote_type_name('sample', 'test')
      expect(quoted).to eql('"test"."sample"')
    end

    it 'always prefer the schema from parameter' do
      # The explicit parameter wins over the schema embedded in the name.
      quoted = connection.quote_type_name('nothis.sample', 'this')
      expect(quoted).to eql('"this"."sample"')
    end
  end
end
================================================
FILE: spec/tests/relation_spec.rb
================================================
require 'spec_helper'
# Custom matcher asserting that a collection of Arel attributes matches, in
# order, the given [relation_name, attribute_name] pairs.
RSpec::Matchers.define :be_attributes_as do |list|
  match do |other|
    # Guard on size first: the original map{}.all? passed vacuously on an
    # empty/short collection and raised NoMethodError on a nil pair when the
    # collection was longer than the expectation.
    next false unless other.size == list.size
    other.each_with_index.all? do |item, idx|
      item.relation.name == list[idx][0] && item.name.to_s == list[idx][1]
    end
  end
end
RSpec.describe 'Relation', type: :helper do
# Exercises the private resolve_column helper directly via Method#call:
# it turns symbols/hashes into Arel attributes and passes strings through.
context 'on resolving columns' do
subject { Post.unscoped.method(:resolve_column) }
# Builds a bare Arel attribute for comparisons.
# NOTE(review): not referenced by the examples visible in this context —
# confirm it is still needed before removing.
def attribute(relation, name)
result = Arel::Attributes::Attribute.new
result.relation = relation
result.name = name
result
end
# Plain strings are treated as SQL literals and returned untouched.
it 'asserts sql literals' do
check = ['name', 'other.title']
expect(subject.call(check)).to eql(check)
end
it 'asserts attribute symbols' do
check = [:title, :content]
result = [['posts', 'title'], ['posts', 'content']]
expect(subject.call(check)).to be_attributes_as(result)
end
# A hash key names the association whose table the attribute belongs to.
it 'asserts direct hash relations' do
check = [:title, author: :name]
result = [['posts', 'title'], ['authors', 'name']]
expect(subject.call(check)).to be_attributes_as(result)
end
it 'asserts multiple values on hash definition' do
check = [author: [:name, :age]]
result = [['authors', 'name'], ['authors', 'age']]
expect(subject.call(check)).to be_attributes_as(result)
end
it 'raises on relation not present' do
check = [supervisors: :name]
expect{ subject.call(check) }.to raise_error(ArgumentError, /Relation for/)
end
# Nesting deeper than one association level is not supported.
it 'raises on third level access' do
check = [author: [comments: :body]]
expect{ subject.call(check) }.to raise_error(ArgumentError, /on third level/)
end
end
# join_series joins the relation against PostgreSQL's GENERATE_SERIES set,
# supporting numeric/float/time/date ranges, custom join modes, aliasing,
# steps, and a block form for arbitrary join conditions.
context 'on joining series' do
let(:source) { Video.all }
it 'works' do
list = create_list(:video, 5)[1..4]
range = list.first.id..list.last.id
expect(source.join_series(range, with: :id).to_a).to eq(list)
# step: 3 only generates the first and last ids of the 4-wide range.
expect(source.join_series(range, with: :id, step: 3).to_a).to eq([list.first, list.last])
end
it 'produces the right SQL' do
sql = 'SELECT "videos".* FROM "videos"'
sql += ' INNER JOIN GENERATE_SERIES(1::integer, 10::integer)'
sql += ' AS series ON "series" = "videos"."id"'
expect(source.join_series(1..10, with: :id).to_sql).to eq(sql)
end
# The series alias defaults to "series" but can be overridden with :as.
it 'can be renamed' do
sql = 'SELECT "videos".* FROM "videos"'
sql += ' INNER JOIN GENERATE_SERIES(1::integer, 10::integer)'
sql += ' AS seq ON "seq" = "videos"."id"'
expect(source.join_series(1..10, with: :id, as: :seq).to_sql).to eq(sql)
end
it 'can contain the step' do
sql = 'SELECT "videos".* FROM "videos"'
sql += ' INNER JOIN GENERATE_SERIES(1::integer, 10::integer, 2::integer)'
sql += ' AS series ON "series" = "videos"."id"'
expect(source.join_series(1..10, with: :id, step: 2).to_sql).to eq(sql)
end
# Float ranges are cast to numeric.
it 'works with float values' do
sql = 'SELECT "videos".* FROM "videos"'
sql += ' INNER JOIN GENERATE_SERIES(1.0::numeric, 10.0::numeric, 0.5::numeric)'
sql += ' AS series ON "series" = "videos"."id"'
expect(source.join_series(1.0..10.0, with: :id, step: 0.5).to_sql).to eq(sql)
end
# Time ranges become timestamps with an ISO-8601 duration interval step.
it 'works with time values' do
sql = 'SELECT "videos".* FROM "videos"'
sql += ' INNER JOIN GENERATE_SERIES('
sql += "'2025-01-01 00:00:00'::timestamp, '2025-01-01 01:00:00'::timestamp"
sql += ", 'PT1M'::interval"
sql += ') AS series ON "series" = "videos"."created_at"'
range = (Time.utc(2025, 1, 1, 0)..Time.utc(2025, 1, 1, 1))
expect(source.join_series(range, with: :created_at, step: 1.minute).to_sql).to eq(sql)
end
# Date ranges are promoted to timestamps as well.
it 'works with date values' do
sql = 'SELECT "videos".* FROM "videos"'
sql += ' INNER JOIN GENERATE_SERIES('
sql += "'2025-01-01 00:00:00'::timestamp, '2025-01-02 00:00:00'::timestamp"
sql += ", 'P1D'::interval"
sql += ') AS series ON "series" = "videos"."created_at"'
range = (Date.new(2025, 1, 1)..Date.new(2025, 1, 2))
expect(source.join_series(range, with: :created_at, step: 1.day).to_sql).to eq(sql)
end
# TimeWithZone values use timestamptz casts.
it 'works with time with zones values' do
sql = 'SELECT "videos".* FROM "videos"'
sql += ' INNER JOIN GENERATE_SERIES('
sql += "'2025-01-01 00:00:00'::timestamptz, '2025-01-01 01:00:00'::timestamptz"
sql += ", 'PT1M'::interval"
sql += ') AS series ON "series" = "videos"."id"'
left = ActiveSupport::TimeZone['UTC'].local(2025, 1, 1, 0)
right = ActiveSupport::TimeZone['UTC'].local(2025, 1, 1, 1)
expect(source.join_series(left..right, with: :id, step: 1.minute).to_sql).to eq(sql)
end
# GENERATE_SERIES' optional 4th (time zone) argument.
it 'can provide the additional time zone value' do
sql = 'SELECT "videos".* FROM "videos"'
sql += ' INNER JOIN GENERATE_SERIES('
sql += "'2025-01-01 00:00:00'::timestamptz, '2025-01-01 01:00:00'::timestamptz"
sql += ", 'PT1M'::interval, 'UTC'::text"
sql += ') AS series ON "series" = "videos"."id"'
left = ActiveSupport::TimeZone['UTC'].local(2025, 1, 1, 0)
right = ActiveSupport::TimeZone['UTC'].local(2025, 1, 1, 1)
query = source.join_series(left..right, with: :id, step: 1.minute, time_zone: 'UTC')
expect(query.to_sql).to eq(sql)
end
it 'can use other types of joins' do
sql = ' LEFT OUTER JOIN GENERATE_SERIES(1::integer, 10::integer)'
expect(source.join_series(1..10, with: :id, mode: :left).to_sql).to include(sql)
sql = ' RIGHT OUTER JOIN GENERATE_SERIES(1::integer, 10::integer)'
expect(source.join_series(1..10, with: :id, mode: :right).to_sql).to include(sql)
sql = ' FULL OUTER JOIN GENERATE_SERIES(1::integer, 10::integer)'
expect(source.join_series(1..10, with: :id, mode: :full).to_sql).to include(sql)
end
# Block form replaces :with, yielding the series alias and the arel table so
# any join condition can be built.
it 'supports a complex way of joining' do
query = source.join_series(1..10) do |series, table|
table['id'].lteq(series)
end
sql = 'SELECT "videos".* FROM "videos"'
sql += ' INNER JOIN GENERATE_SERIES(1::integer, 10::integer)'
sql += ' AS series ON "videos"."id" <= "series"'
expect(query.to_sql).to eq(sql)
end
it 'properly binds all provided values' do
query = source.join_series(1..10, with: :id, step: 2)
sql, binds = get_query_with_binds { query.load }
expect(sql).to include('GENERATE_SERIES($1::integer, $2::integer, $3::integer)')
expect(binds.map(&:value)).to eq([1, 10, 2])
end
# Argument-validation failures, matched by message fragments.
context 'on errors' do
it 'does not support non-range values' do
expect do
source.join_series(1, with: :id)
end.to raise_error(ArgumentError, /Range/)
end
it 'does not support beginless ranges' do
expect do
source.join_series(..10, with: :id)
end.to raise_error(ArgumentError, /Beginless/)
end
it 'does not support endless ranges' do
expect do
source.join_series(1.., with: :id)
end.to raise_error(ArgumentError, /Endless/)
end
it 'requires a step when using non-numeric ranges' do
range = Date.new(2025, 1, 1)..Date.new(2025, 1, 10)
expect do
source.join_series(range, with: :id)
end.to raise_error(ArgumentError, /:step/)
end
it 'has strict type of join support' do
expect do
source.join_series(1..10, with: :id, mode: :cross)
end.to raise_error(ArgumentError, /join type/)
end
it 'requires a :with keyword' do
expect do
source.join_series(1..10)
end.to raise_error(ArgumentError, /:with/)
end
it 'does not support unexpected values' do
expect do
source.join_series(1..10, step: :other)
end.to raise_error(ArgumentError, /value type/)
end
end
end
# buckets() groups rows via PostgreSQL's WIDTH_BUCKET (or by enum values via
# :cast) and exposes the grouping through .records and calculations.
context 'on buckets' do
let(:source) { User.all }
it 'produces the right query' do
query = source.buckets(:age, 0..50, count: 5)
sql, binds = get_query_with_binds { query.load }
expect(sql).to include(<<~SQL.squish)
WIDTH_BUCKET("users"."age", $1::numeric, $2::numeric, $3::integer) AS bucket
SQL
expect(binds.map(&:value)).to eq([0, 50, 5])
end
# .records returns a Hash keyed by the bucket range; only non-empty buckets
# appear.
it 'can query records by buckets' do
list = [create(:user, age: 5), create(:user, age: 5), create(:user, age: 15)]
query = source.buckets(:age, 0..50, count: 5).records
expect(query).to be_a(Hash)
expect(query.keys).to match_array([0...10, 10...20])
expect(query[0...10]).to match_array([list[0], list[1]])
expect(query[10...20]).to match_array([list[2]])
end
# Enum bucketing: values outside the provided list land under the nil key.
it 'can query buckets of roles' do
list = [create(:user, role: :visitor)]
list << create(:user, role: :assistant)
list << create(:user, role: :manager)
query = source.buckets(:role, %w[assistant manager], cast: :roles).records
expect(query).to be_a(Hash)
expect(query.keys).to match_array([nil, 'assistant', 'manager'])
expect(query[nil]).to eq([list[0]])
expect(query['assistant']).to eq([list[1]])
expect(query['manager']).to eq([list[2]])
end
# `list` is kept only for the creation side effect here — the records must
# exist for the calculation to see them.
it 'works with calculations' do
list = [create(:user, age: 5), create(:user, age: 5), create(:user, age: 15)]
query = source.buckets(:age, 0..50, count: 5).count
expect(query).to be_a(Hash)
expect(query.keys).to match_array([0...10, 10...20])
expect(query[0...10]).to eq(2)
expect(query[10...20]).to eq(1)
end
it 'works with other types of calculations' do
list = [create(:user, age: 5), create(:user, age: 5), create(:user, age: 15)]
query = source.buckets(:age, 0..50, count: 5).sum(:age)
expect(query).to be_a(Hash)
expect(query.keys).to match_array([0...10, 10...20])
expect(query[0...10]).to eq(10)
expect(query[10...20]).to eq(15)
end
# Bucketing survives being merged into a joined relation on another model.
it 'work with joins and merge' do
list = [create(:user, age: 5), create(:user, age: 5), create(:user, age: 15)]
records = [create(:comment, user: list[0], content: 'Hello')]
records << create(:comment, user: list[1], content: 'World')
records << create(:comment, user: list[2], content: 'Test')
query = Comment.joins(:user).merge(source.buckets(:age, 0..50, count: 5)).records
expect(query).to be_a(Hash)
expect(query.keys).to match_array([0...10, 10...20])
expect(query[0...10]).to match_array([records[0], records[1]])
expect(query[10...20]).to match_array([records[2]])
end
end
end
================================================
FILE: spec/tests/schema_spec.rb
================================================
require 'spec_helper'
RSpec.describe 'Schema' do
let(:connection) { ActiveRecord::Base.connection }
let(:source) { ActiveRecord::Base.connection_pool }
# Clear the memoized schema black/white lists on the connection so each
# example starts from the adapter's default filtering state.
before do
  connection.instance_variable_set(:@schemas_blacklist, nil)
  connection.instance_variable_set(:@schemas_whitelist, nil)
end
# Covers schema-level DDL helpers (create/drop/exists) and their revert logic.
context 'on migration' do
  # schema_exists? applies the black/white list filter unless filtered: false.
  it 'can check for existence' do
    expect(connection.schema_exists?(:information_schema)).to be_falsey
    expect(connection.schema_exists?(:information_schema, filtered: false)).to be_truthy
  end

  it 'can be created' do
    expect(connection.schema_exists?(:legacy, filtered: false)).to be_falsey
    connection.create_schema(:legacy)
    expect(connection.schema_exists?(:legacy, filtered: false)).to be_truthy
  end

  it 'can be deleted' do
    expect(connection.schema_exists?(:legacy, filtered: false)).to be_falsey
    connection.create_schema(:legacy)
    expect(connection.schema_exists?(:legacy, filtered: false)).to be_truthy
    connection.drop_schema(:legacy)
    expect(connection.schema_exists?(:legacy, filtered: false)).to be_falsey
  end

  # A created schema only becomes visible to the filtered check once it is
  # pushed onto the whitelist.
  it 'works with whitelist' do
    expect(connection.schema_exists?(:legacy)).to be_falsey
    connection.create_schema(:legacy)
    expect(connection.schema_exists?(:legacy)).to be_falsey
    expect(connection.schema_exists?(:legacy, filtered: false)).to be_truthy
    connection.schemas_whitelist.push('legacy')
    expect(connection.schema_exists?(:legacy)).to be_truthy
  end

  context 'reverting' do
    let(:migration) { ActiveRecord::Migration::Current.new('Testing') }

    before do
      allow_any_instance_of(ActiveRecord::Migration).to receive(:puts) # Disable messages
      connection.create_schema(:legacy)
    end

    # Migration#revert must invert create_schema into a drop.
    it 'reverts the creation of a schema' do
      expect(connection.schema_exists?(:legacy, filtered: false)).to be_truthy
      migration.revert { migration.connection.create_schema(:legacy) }
      expect(connection.schema_exists?(:legacy, filtered: false)).to be_falsey
    end

    # Reverting a schema-scoped create_table must drop the table inside it.
    it 'reverts the creation of a table' do
      connection.create_table(:users, schema: :legacy) { |t| t.string(:name) }
      expect(connection.table_exists?('legacy.users')).to be_truthy
      migration.revert { migration.connection.create_table(:users, schema: :legacy) }
      expect(connection.table_exists?('legacy.users')).to be_falsey
    end
  end
end
# Covers how custom schemas show up (or don't) in the SchemaDumper output.
context 'on schema' do
  # Dump the whole schema into a string so examples can match against it.
  let(:dump_result) do
    ActiveRecord::SchemaDumper.dump(source, (dump_result = StringIO.new))
    dump_result.string
  end

  # All `match` calls now use parentheses around the regex literal; the
  # paren-less form triggers Ruby's "ambiguous first argument" warning.
  it 'does not add when there is no extra schemas' do
    connection.drop_schema(:internal, force: :cascade)
    expect(dump_result).not_to match(/Custom schemas defined in this database/)
  end

  it 'does not include tables from blacklisted schemas' do
    connection.schemas_blacklist.push('internal')
    expect(dump_result).not_to match(/create_table "users",.*schema: +"internal"/)
  end

  context 'with internal schema whitelisted' do
    before { connection.schemas_whitelist.push('internal') }

    it 'dumps the schemas' do
      expect(dump_result).to match(/create_schema "internal"/)
    end

    it 'shows the internal users table in the connection tables list' do
      expect(connection.tables).to include('internal.users')
    end

    it 'dumps tables on whitelisted schemas' do
      expect(dump_result).to match(/create_table "users",.*schema: +"internal"/)
    end
  end

  # Guards against the dumper rewriting plain serial primary keys.
  it 'does not affect serial ids' do
    connection.create_table(:primary_keys, id: :serial) do |t|
      t.string :title
    end

    parts = '"primary_keys", id: :serial, force: :cascade'
    expect(dump_result).to match(/create_table #{parts} do /)
  end
end
# Covers how a model's configured schema feeds into its table name and SQL.
context 'on relation' do
  let(:model) { Internal::User }
  let(:table_name) { Torque::PostgreSQL::TableName.new(model, 'users') }

  it 'adds the schema to the query' do
    model.reset_table_name
    expect(table_name.to_s).to eq('internal.users')
    expect(model.all.to_sql).to match(/FROM "internal"."users"/)
  end

  # When the model itself has no schema, the enclosing module's schema is used.
  it 'can load the schema from the module' do
    allow(Internal).to receive(:schema).and_return('internal')
    allow(model).to receive(:schema).and_return(nil)
    model.reset_table_name
    expect(table_name.to_s).to eq('internal.users')
    expect(model.all.to_sql).to match(/FROM "internal"."users"/)
  end

  it 'does not change anything if the model has not configured a schema' do
    allow(model).to receive(:schema).and_return(nil)
    model.reset_table_name
    expect(table_name.to_s).to eq('users')
    expect(model.all.to_sql).to match(/FROM "users"/)
  end
end
end
================================================
FILE: spec/tests/table_inheritance_spec.rb
================================================
require 'spec_helper'
RSpec.describe 'TableInheritance' do
let(:connection) { ActiveRecord::Base.connection }
# Asserts the exact CREATE TABLE SQL produced with and without :inherits.
context 'on migration' do
  mock_create_table

  # Baseline: the extension must not alter ordinary table creation SQL.
  it 'does not affect some basic forms of table creation' do
    sql = connection.create_table('schema_migrations', id: false) do |t|
      t.string :version, **connection.internal_string_options_for_primary_key
    end
    result = 'CREATE TABLE "schema_migrations"'
    result << ' \("version" character varying( NOT NULL)? PRIMARY KEY\)'
    expect(sql).to match(/#{result}/)
  end

  it 'does not affect simple table creation' do
    sql = connection.create_table(:activities) do |t|
      t.string :title
      t.boolean :active
      t.timestamps
    end
    result = 'CREATE TABLE "activities" ('
    result << '"id" bigserial primary key'
    result << ', "title" character varying'
    result << ', "active" boolean'
    result << ', "created_at" timestamp(6) NOT NULL'
    result << ', "updated_at" timestamp(6) NOT NULL'
    result << ')'
    expect(sql).to eql(result)
  end

  it 'does not affect temporary table creation based on a query' do
    query = 'SELECT * FROM "authors"'
    sql = connection.create_table(:test, temporary: true, as: query)
    result = 'CREATE TEMPORARY TABLE "test"'
    result << " AS #{query}"
    expect(sql).to eql(result)
  end

  # The :inherits option should append PostgreSQL's INHERITS clause.
  it 'adds the inherits statement for a single inheritance' do
    sql = connection.create_table(:activity_videos, inherits: :activities) do |t|
      t.string :url
    end
    result = 'CREATE TABLE "activity_videos" ('
    result << '"url" character varying'
    result << ') INHERITS ( "activities" )'
    expect(sql).to eql(result)
  end

  it 'adds the inherits statement for a multiple inheritance' do
    sql = connection.create_table(:activity_tests, inherits: [:activities, :tests]) do |t|
      t.string :grade
    end
    result = 'CREATE TABLE "activity_tests" ('
    result << '"grade" character varying'
    result << ') INHERITS ( "activities" , "tests" )'
    expect(sql).to eql(result)
  end

  # A purely-inherited table can be created without any column block.
  it 'allows empty-body create table operation' do
    sql = connection.create_table(:activity_posts, inherits: :activities)
    result = "CREATE TABLE \"activity_posts\" ()"
    result << ' INHERITS ( "activities" )'
    expect(sql).to eql(result)
  end
end
# Verifies the schema dumper emits the inherits: option for inherited tables.
context 'on schema' do
  let(:source) { ActiveRecord::Base.connection_pool }
  # Dump the full schema into a string for the regex assertions below.
  let(:dump_result) do
    ActiveRecord::SchemaDumper.dump(source, (dump_result = StringIO.new))
    dump_result.string
  end

  it 'dumps single inheritance with body' do
    parts = '"activity_books"'
    parts << ', id: false'
    parts << ', inherits: "activities"'
    parts << ', force: :cascade'
    expect(dump_result).to match(/create_table #{parts} do /)
  end

  # A table with no own columns should be dumped without a block.
  it 'dumps single inheritance without body' do
    parts = '"activity_post_samples"'
    parts << ', id: false'
    parts << ', inherits: "activity_posts"'
    parts << ', force: :cascade'
    expect(dump_result).to match(/create_table #{parts}(?! do \|t\|)/)
  end

  # Multiple parents may be dumped in either order, hence the alternation.
  it 'dumps multiple inheritance' do
    parts = '"activity_posts"'
    parts << ', id: false'
    parts << ', inherits: (\["images", "activities"\]|\["activities", "images"\])'
    parts << ', force: :cascade'
    expect(dump_result).to match(/create_table #{parts}/)
  end
end
# Exercises the extended schema cache: inheritance association generation
# and table-name => model lookup.
context 'on schema cache' do
  let(:schema_cache) { ActiveRecord::Base.connection.schema_cache }
  let(:schema_cache_reflection) { schema_cache.instance_variable_get(:@schema_reflection) }
  let(:new_schema_cache) { schema_cache_reflection.send(:cache, schema_cache_source) }
  let(:schema_cache_source) { schema_cache.instance_variable_get(:@pool) }

  subject { new_schema_cache }

  # Given a child => parents dependency map, generate_associations should
  # produce the full ancestor => descendants map (transitively closed).
  it 'correctly defines the associations' do
    scenario = {
      'M' => %w(N),
      'N' => %w(C),
      'C' => %w(B),
      'B' => %w(A),
      'D' => %w(A),
      'F' => %w(E),
      'G' => %w(E H),
    }

    subject.instance_variable_set(:@inheritance_loaded, true)
    subject.instance_variable_set(:@inheritance_dependencies, scenario)
    subject.instance_variable_set(:@inheritance_associations, subject.send(:generate_associations))
    subject.instance_variable_set(:@data_sources_model_names, {})

    expect(subject.instance_variable_get(:@inheritance_associations)).to eql({
      'A' => %w(B D C N M),
      'B' => %w(C N M),
      'C' => %w(N M),
      'N' => %w(M),
      'E' => %w(F G),
      'H' => %w(G),
    })
  end

  context 'on looking up models' do
    let(:prepare_arguments) { [schema_cache_source] }
    let(:prepare_method) { :add_all }

    # Reset the cached lookup tables so later specs are not polluted.
    after(:all) do
      schema_cache = ActiveRecord::Base.connection.schema_cache
      schema_cache.instance_variable_set(:@data_sources, {})
      schema_cache.instance_variable_set(:@data_sources_model_names, {})
    end

    it 'respect irregular names' do
      allow(Torque::PostgreSQL.config).to receive(:irregular_models).and_return({
        'public.posts' => 'ActivityPost',
      })

      subject.send(prepare_method, *prepare_arguments)
      list = subject.instance_variable_get(:@data_sources_model_names)
      expect(list).to have_key('public.posts')
      expect(list['public.posts']).to eql(ActivityPost)
    end

    # Irregular entries whose data source does not exist must be skipped.
    it 'does not load irregular where the data source is not defined' do
      allow(Torque::PostgreSQL.config).to receive(:irregular_models).and_return({
        'products' => 'Product',
      })

      subject.send(prepare_method, *prepare_arguments)
      list = subject.instance_variable_get(:@data_sources_model_names)
      expect(list).to_not have_key('products')
    end

    it 'works with eager loading' do
      allow(Torque::PostgreSQL.config).to receive(:eager_load).and_return(true)

      ActivityPost.reset_table_name
      list = subject.instance_variable_get(:@data_sources_model_names)
      expect(list).to have_key('activity_posts')
      expect(list['activity_posts']).to eql(ActivityPost)
    end

    # One example per table-name => model translation pair.
    {
      'activities' => 'Activity',
      'activity_posts' => 'ActivityPost',
      'activity_post_samples' => 'ActivityPost::Sample',
    }.each do |table_name, expected_model|
      it "translate the table name #{table_name} to #{expected_model} model" do
        expect(subject.lookup_model(table_name)).to eql(expected_model.constantize)
      end
    end
  end
end
# Covers the model-level inheritance reflection API (merged attributes,
# dependents, physical-inheritance checks and table name computation).
context 'on inheritance' do
  let(:base) { Activity }
  let(:child) { ActivityPost }
  let(:child2) { ActivityBook }
  let(:other) { AuthorJournalist }

  # Clear cached schema info so inheritance metadata is rebuilt per example.
  before { ActiveRecord::Base.connection.schema_cache.clear! }

  it 'identifies mergeable attributes' do
    result_base = %w(id author_id title active kind created_at updated_at description url file post_id)
    expect(base.inheritance_mergeable_attributes.sort).to eql(result_base.sort)
  end

  it 'has a merged version of attributes' do
    result_base = %w(id author_id title active kind created_at updated_at description url activated file post_id)
    result_child = %w(id author_id title active kind created_at updated_at file post_id url activated)
    result_child2 = %w(id author_id title active kind created_at updated_at description url activated)
    result_other = %w(id name type specialty)
    expect(base.inheritance_merged_attributes).to eql(result_base)
    expect(child.inheritance_merged_attributes).to eql(result_child)
    expect(child2.inheritance_merged_attributes).to eql(result_child2)
    expect(other.inheritance_merged_attributes).to eql(result_other)
  end

  # Physical inheritance means an actual INHERITS table, as opposed to STI
  # (AuthorJournalist is STI, so it reports false).
  it 'identifies physical inheritance' do
    expect(base.physically_inherited?).to be_falsey
    expect(child.physically_inherited?).to be_truthy
    expect(child2.physically_inherited?).to be_truthy
    expect(other.physically_inherited?).to be_falsey
  end

  it 'returns a list of dependent tables' do
    expect(base.inheritance_dependents).to eql(%w(activity_books activity_posts activity_post_samples))
    expect(child.inheritance_dependents).to eql(%w(activity_post_samples))
    expect(child2.inheritance_dependents).to eql(%w())
    expect(other.inheritance_dependents).to eql(%w())
  end

  it 'can check dependency' do
    expect(base.physically_inheritances?).to be_truthy
    expect(child.physically_inheritances?).to be_truthy
    expect(child2.physically_inheritances?).to be_falsey
    expect(other.physically_inheritances?).to be_falsey
  end

  it 'returns the list of models that the records can be casted to' do
    expect(base.casted_dependents.values.map(&:name)).to eql(%w(ActivityBook ActivityPost ActivityPost::Sample))
    expect(child.casted_dependents.values.map(&:name)).to eql(%w(ActivityPost::Sample))
    expect(child2.casted_dependents.values.map(&:name)).to eql(%w())
    expect(other.casted_dependents.values.map(&:name)).to eql(%w())
  end

  it 'correctly generates the tables name' do
    expect(base.table_name).to eql('activities')
    expect(child.table_name).to eql('activity_posts')
    expect(child2.table_name).to eql('activity_books')
    expect(other.table_name).to eql('authors')
  end

  # Builds a throwaway module/class pair to exercise prefix/suffix lookup
  # during compute_table_name.
  it 'respects the table name prefix and suffix defined on parent module' do
    mod = Object.const_set('Private', Module.new)
    mod.define_singleton_method(:table_name_prefix) { 'private.' }
    mod.define_singleton_method(:table_name_suffix) { '_bundle' }
    result = 'private.activity_post_others_bundle'

    klass = mod.const_set('Other', Class.new(ActivityPost))
    allow(klass).to receive(:module_parent).and_return(child)
    allow(klass).to receive(:module_parents).and_return([mod])
    allow(klass).to receive(:physically_inherited?).and_return(true)
    expect(klass.send(:compute_table_name)).to be_eql(result)
  end
end
context 'on relation' do
let(:base) { Activity }
let(:child) { ActivityBook }
let(:other) { AuthorJournalist }
# The inheritance API must be exposed on the relation and on records.
it 'has operation methods' do
  %i[itself_only cast_records].each do |relation_method|
    expect(base).to respond_to(relation_method)
  end
  expect(base.new).to respond_to(:cast_record)
end
# Covers the itself_only scope, which maps to PostgreSQL's FROM ONLY.
context 'itself only' do
  it 'does not mess with original queries' do
    expect(base.all.to_sql).to \
      eql('SELECT "activities".* FROM "activities"')
  end

  it 'adds the only condition to the query' do
    expect(base.itself_only.to_sql).to \
      eql('SELECT "activities".* FROM ONLY "activities"')
  end

  # Fixed typo in the example description ("ammount" -> "amount").
  it 'returns the right amount of entries' do
    base.create!(title: 'Activity only')
    child.create!(title: 'Activity book')

    # Querying the base includes descendant rows unless itself_only is used.
    expect(base.count).to eql(2)
    expect(base.itself_only.count).to eql(1)
    expect(child.count).to eql(1)
  end
end
# Covers cast_records: joining dependent tables, merged columns, filtering,
# calculations and instantiating rows as their concrete model.
context 'cast records' do
  before :each do
    base.create(title: 'Activity test')
    child.create(title: 'Activity book', url: 'bookurl1')
    other.create(name: 'An author name')
  end

  it 'does not mess with single table inheritance' do
    result = 'SELECT "authors".* FROM "authors"'
    result << " WHERE \"authors\".\"type\" = 'AuthorJournalist'"
    expect(other.all.to_sql).to eql(result)
  end

  # cast_records joins every dependent table and exposes merged columns.
  it 'adds all statements to load all the necessary records' do
    result = 'SELECT "activities".*, "activities"."tableoid"::regclass AS _record_class, "i_0"."description"'
    result << ', COALESCE("i_0"."url", "i_1"."url", "i_2"."url") AS url, "i_0"."activated" AS activity_books__activated'
    result << ', "i_1"."activated" AS activity_posts__activated, "i_2"."activated" AS activity_post_samples__activated'
    result << ', COALESCE("i_1"."file", "i_2"."file") AS file, COALESCE("i_1"."post_id", "i_2"."post_id") AS post_id'
    result << ", \"activities\".\"tableoid\"::regclass::varchar IN ('activity_books', 'activity_posts', 'activity_post_samples') AS _auto_cast"
    result << ' FROM "activities"'
    result << ' LEFT OUTER JOIN "activity_books" "i_0" ON "activities"."id" = "i_0"."id"'
    result << ' LEFT OUTER JOIN "activity_posts" "i_1" ON "activities"."id" = "i_1"."id"'
    result << ' LEFT OUTER JOIN "activity_post_samples" "i_2" ON "activities"."id" = "i_2"."id"'
    expect(base.cast_records.all.to_sql).to eql(result)
  end

  # Fixed typos in the description ("can be have simplefied" -> "can have simplified").
  it 'can have simplified joins' do
    result = 'SELECT "activities".*, "activities"."tableoid"::regclass AS _record_class'
    result << ', "i_0"."description", "i_0"."url", "i_0"."activated"'
    result << ", \"activities\".\"tableoid\"::regclass::varchar IN ('activity_books') AS _auto_cast"
    result << ' FROM "activities"'
    result << ' LEFT OUTER JOIN "activity_books" "i_0" ON "activities"."id" = "i_0"."id"'
    expect(base.cast_records(child).all.to_sql).to eql(result)
  end

  it 'can be filtered by record type' do
    result = 'SELECT "activities".*, "activities"."tableoid"::regclass AS _record_class'
    result << ', "i_0"."description", "i_0"."url", "i_0"."activated"'
    result << ", \"activities\".\"tableoid\"::regclass::varchar IN ('activity_books') AS _auto_cast"
    result << ' FROM "activities"'
    result << ' LEFT OUTER JOIN "activity_books" "i_0" ON "activities"."id" = "i_0"."id"'
    result << " WHERE \"activities\".\"tableoid\"::regclass::varchar IN ('activity_books')"
    expect(base.cast_records(child, filter: true).all.to_sql).to eql(result)
  end

  it 'works with count and does not add extra columns' do
    result = 'SELECT COUNT(*)'
    result << ' FROM "activities"'
    result << ' LEFT OUTER JOIN "activity_books" "i_0" ON "activities"."id" = "i_0"."id"'
    result << ' LEFT OUTER JOIN "activity_posts" "i_1" ON "activities"."id" = "i_1"."id"'
    result << ' LEFT OUTER JOIN "activity_post_samples" "i_2" ON "activities"."id" = "i_2"."id"'
    query = get_last_executed_query{ base.cast_records.all.count }
    expect(query).to eql(result)
  end

  it 'works with sum and does not add extra columns' do
    result = 'SELECT SUM("activities"."id")'
    result << ' FROM "activities"'
    result << ' LEFT OUTER JOIN "activity_books" "i_0" ON "activities"."id" = "i_0"."id"'
    result << ' LEFT OUTER JOIN "activity_posts" "i_1" ON "activities"."id" = "i_1"."id"'
    result << ' LEFT OUTER JOIN "activity_post_samples" "i_2" ON "activities"."id" = "i_2"."id"'
    query = get_last_executed_query{ base.cast_records.all.sum(:id) }
    expect(query).to eql(result)
  end

  it 'returns the correct model object' do
    ActivityPost.create(title: 'Activity post')
    ActivityPost::Sample.create(title: 'Activity post')
    records = base.cast_records.order(:id).load.to_a
    expect(records[0]).to be_instance_of(Activity)
    expect(records[1]).to be_instance_of(ActivityBook)
    expect(records[2]).to be_instance_of(ActivityPost)
    expect(records[3]).to be_instance_of(ActivityPost::Sample)
  end

  # When casting is restricted to one model, other rows stay as the base.
  it 'does not cast unnecessary records' do
    ActivityPost.create(title: 'Activity post')
    records = base.cast_records(ActivityBook).order(:id).load.to_a
    expect(records[0]).to be_instance_of(Activity)
    expect(records[1]).to be_instance_of(ActivityBook)
    expect(records[2]).to be_instance_of(Activity)
  end

  it 'correctly identifies same name attributes' do
    ActivityPost.create(title: 'Activity post', url: 'posturl1')
    records = base.cast_records.order(:id).load.to_a
    expect(records[1].url).to eql('bookurl1')
    expect(records[2].url).to eql('posturl1')
  end

  # TODO: Maybe in the future
  xit 'does not make internal inheritance attributes accessible' do
    record = base.cast_records.order(:id).load.last
    expect(record).to be_instance_of(ActivityBook)
    expect(record).not_to respond_to(:_record_class)
    expect(record).not_to respond_to(:_auto_cast)
  end
end
# Covers the per-record cast_record helper, including UUID primary keys.
context 'cast record' do
  before :each do
    base.create(title: 'Activity test')
    child.create(title: 'Activity book')
    other.create(name: 'An author name')
    # Drop the memoized dependents map so it is rebuilt per example.
    base.instance_variable_set(:@casted_dependents, nil)
  end

  it 'does not affect normal records' do
    expect(base.first.cast_record).to be_a(base)
    expect(child.first.cast_record).to be_a(child)
    expect(other.first.cast_record).to be_a(other)
  end

  # Fixed typo in the example description ("rises" -> "raises").
  it 'raises an error when the casted model cannot be defined' do
    base.instance_variable_set(:@casted_dependents, {})
    expect{ base.second.cast_record }.to raise_error(ArgumentError, /to type 'activity_books'/)
  end

  it 'can return the record class even when the auxiliary statement is not mentioned' do
    expect(base.first._record_class).to eql('activities')
    expect(base.second._record_class).to eql('activity_books')
    expect(other.first._record_class).to eql('authors')
  end

  it 'does trigger record casting when accessed through inheritance' do
    expect(base.second.cast_record).to eql(child.first)
  end

  context 'using uuid' do
    let(:base) { Question }
    let(:child) { QuestionSelect }

    before :each do
      base.create(title: 'Simple question')
      child.create(title: 'Select question')
      base.instance_variable_set(:@casted_dependents, nil)
    end

    it 'does not affect normal records' do
      expect(base.first.cast_record).to be_a(base)
      expect(child.first.cast_record).to be_a(child)
    end

    it 'does trigger record casting when accessed through inheritance' do
      expect(base.second.cast_record).to eql(child.first)
    end
  end
end
end
end
================================================
FILE: spec/tests/versioned_commands_spec.rb
================================================
require 'spec_helper'
RSpec.describe 'VersionedCommands' do
let(:connection) { ActiveRecord::Base.connection }
context 'on migration' do
# The plain connection should not expose the versioned command helpers.
it 'does not have any of the schema methods' do
  %i[create_function create_type create_view].each do |command_method|
    expect(connection).not_to respond_to(command_method)
  end
end
# Regular migrations should not gain the versioned command helpers either.
it 'does not have the methods available in a migration' do
  instance = Class.new(ActiveRecord::Migration::Current).allocate
  %i[create_function create_type create_view].each do |command_method|
    expect(instance).not_to respond_to(command_method)
  end
end
# Schema definitions are the one place the helpers must be available.
it 'does have the methods in schema definition' do
  instance = ActiveRecord::Schema[ActiveRecord::Migration.current_version].allocate
  %i[create_function create_type create_view].each do |command_method|
    expect(instance).to respond_to(command_method)
  end
end
# Exercises the migration context against the SQL command fixture files.
context 'on context' do
  let(:context) { connection.pool.migration_context }
  let(:path) { Pathname.new(__FILE__).join('../../fixtures/migrations').expand_path.to_s }

  # Point the migration context at the fixture migrations for these examples.
  before { context.instance_variable_set(:@migrations_paths, [path]) }

  # Fixed grammar in the example description ("list" -> "lists").
  it 'lists all migrations accordingly' do
    result = context.migrations.map { |m| File.basename(m.filename) }
    expect(result[0]).to eq('20250101000001_create_users.rb')
    expect(result[1]).to eq('20250101000002_create_function_count_users_v1.sql')
    expect(result[2]).to eq('20250101000003_create_internal_users.rb')
    expect(result[3]).to eq('20250101000004_update_function_count_users_v2.sql')
    expect(result[4]).to eq('20250101000005_create_view_all_users_v1.sql')
    expect(result[5]).to eq('20250101000006_create_type_user_id_v1.sql')
    expect(result[6]).to eq('20250101000007_remove_function_count_users_v2.sql')
  end

  # Fixed grammar in the example description ("report" -> "reports").
  it 'correctly reports the status of all migrations' do
    result = context.migrations_status.reject { |s| s[1].start_with?('0') }
    expect(result[0]).to eq(['down', '20250101000001', 'Create users'])
    expect(result[1]).to eq(['down', '20250101000002', 'Create Function count_users (v1)'])
    expect(result[2]).to eq(['down', '20250101000003', 'Create internal users'])
    expect(result[3]).to eq(['down', '20250101000004', 'Update Function count_users (v2)'])
    expect(result[4]).to eq(['down', '20250101000005', 'Create View all_users (v1)'])
    expect(result[5]).to eq(['down', '20250101000006', 'Create Type user_id (v1)'])
    expect(result[6]).to eq(['down', '20250101000007', 'Remove Function count_users (v2)'])
  end

  it 'reports for invalid names' do
    allow(context).to receive(:command_files).and_return(['something.sql'])
    error = ::Torque::PostgreSQL::IllegalCommandTypeError
    expect { context.migrations }.to raise_error(error)
  end
end
# Validates the SQL content of versioned command files (functions, types,
# views) before they are executed; each sub-context feeds hand-written SQL
# into VersionedCommands.validate! and asserts accept/reject.
context 'on validation' do
  let(:base) { Torque::PostgreSQL::VersionedCommands }

  # Function files: exactly one (possibly overloaded) function, matching the
  # file name, always defined with CREATE OR REPLACE.
  context 'on function' do
    it 'prevents multiple functions definition' do
      content = <<~SQL
        CREATE FUNCTION test(a integer);
        CREATE FUNCTION other_test(a varchar);
      SQL
      expect do
        base.validate!(:function, content, 'test')
      end.to raise_error(ArgumentError)
    end

    it 'prevents same name but different schema' do
      content = <<~SQL
        CREATE FUNCTION internal.test(a integer);
        CREATE FUNCTION external.test(a varchar);
      SQL
      expect do
        base.validate!(:function, content, 'test')
      end.to raise_error(ArgumentError)
    end

    it 'requires OR REPLACE clause' do
      content = <<~SQL
        CREATE OR REPLACE FUNCTION test(a integer);
        CREATE FUNCTION test(a varchar);
      SQL
      expect do
        base.validate!(:function, content, 'test')
      end.to raise_error(ArgumentError)
    end

    it 'requires matching name' do
      content = <<~SQL
        CREATE OR REPLACE FUNCTION other_test(a integer);
        CREATE OR REPLACE FUNCTION other_test(a varchar);
      SQL
      expect do
        base.validate!(:function, content, 'test')
      end.to raise_error(ArgumentError)
    end

    # Overloads of the same name are allowed; matching is case-insensitive.
    it 'works when setup correctly' do
      content = <<~SQL
        CREATE OR REPLACE FUNCTION test(a integer);
        CREATE OR REPLACE FUNCTION test(a varchar);
        CREATE OR REPLACE FUNCTION TEST(a date);
      SQL
      expect { base.validate!(:function, content, 'test') }.not_to raise_error
    end

    # A schema-qualified name maps to the underscored file name.
    it 'supports name with schema' do
      content = <<~SQL
        CREATE OR REPLACE FUNCTION internal.test(a integer);
        CREATE OR REPLACE FUNCTION internal.test(a varchar);
        CREATE OR REPLACE FUNCTION internal.TEST(a date);
      SQL
      expect { base.validate!(:function, content, 'internal_test') }.not_to raise_error
    end
  end

  # Type files: exactly one DROP TYPE IF EXISTS followed by one CREATE TYPE
  # for the same, matching name.
  context 'on type' do
    it 'prevents multiple type definitions' do
      content = <<~SQL
        CREATE TYPE test AS;
        CREATE TYPE other_test AS;
      SQL
      expect do
        base.validate!(:type, content, 'test')
      end.to raise_error(ArgumentError)
    end

    it 'prevents same name but different schema' do
      content = <<~SQL
        DROP TYPE IF EXISTS internal.test;
        CREATE TYPE external.test AS;
      SQL
      expect do
        base.validate!(:type, content, 'test')
      end.to raise_error(ArgumentError)
    end

    it 'prevents multiple type drops' do
      content = <<~SQL
        DROP TYPE IF EXISTS test;
        DROP TYPE IF EXISTS other_test;
        CREATE TYPE test AS;
      SQL
      expect do
        base.validate!(:type, content, 'test')
      end.to raise_error(ArgumentError)
    end

    it 'requires DROP TYPE clause' do
      content = <<~SQL
        CREATE TYPE test AS;
      SQL
      expect do
        base.validate!(:type, content, 'test')
      end.to raise_error(ArgumentError)
    end

    it 'prevents dropping other types' do
      content = <<~SQL
        DROP TYPE IF EXISTS other_test;
        CREATE TYPE test AS;
      SQL
      expect do
        base.validate!(:type, content, 'test')
      end.to raise_error(ArgumentError)
    end

    it 'requires matching name' do
      content = <<~SQL
        DROP TYPE IF EXISTS other_test;
        CREATE TYPE other_test AS;
      SQL
      expect do
        base.validate!(:type, content, 'test')
      end.to raise_error(ArgumentError)
    end

    # Name matching is case-insensitive.
    it 'works when setup correctly' do
      content = <<~SQL
        DROP TYPE IF EXISTS test;
        CREATE TYPE TEST AS;
      SQL
      expect { base.validate!(:type, content, 'test') }.not_to raise_error
    end

    it 'supports name with schema' do
      content = <<~SQL
        DROP TYPE IF EXISTS internal.test;
        CREATE TYPE INTERNAL.TEST AS;
      SQL
      expect { base.validate!(:type, content, 'internal_test') }.not_to raise_error
    end
  end

  # View files: one CREATE OR REPLACE VIEW (or DROP + CREATE MATERIALIZED
  # VIEW pair) whose name matches the file name.
  context 'on view' do
    it 'requires a proper definition' do
      content = <<~SQL
        CREATE TEMP MATERIALIZED VIEW test AS;
      SQL
      expect do
        base.validate!(:view, content, 'test')
      end.to raise_error(ArgumentError)
    end

    it 'prevents multiple view definitions' do
      content = <<~SQL
        CREATE VIEW test AS;
        CREATE VIEW other_test AS;
      SQL
      expect do
        base.validate!(:view, content, 'test')
      end.to raise_error(ArgumentError)
    end

    it 'requires OR REPLACE clause' do
      content = <<~SQL
        CREATE VIEW test AS;
      SQL
      expect do
        base.validate!(:view, content, 'test')
      end.to raise_error(ArgumentError)
    end

    it 'requires matching name' do
      content = <<~SQL
        CREATE OR REPLACE VIEW other_test AS;
      SQL
      expect do
        base.validate!(:view, content, 'test')
      end.to raise_error(ArgumentError)
    end

    it 'works when setup correctly' do
      content = <<~SQL
        CREATE OR REPLACE VIEW TEST AS;
      SQL
      expect { base.validate!(:view, content, 'test') }.not_to raise_error
    end

    # Materialized views cannot use OR REPLACE, so DROP + CREATE is accepted.
    it 'supports materialized views' do
      content = <<~SQL
        DROP MATERIALIZED VIEW IF EXISTS test;
        CREATE MATERIALIZED VIEW test AS;
      SQL
      expect { base.validate!(:view, content, 'test') }.not_to raise_error
    end

    it 'supports name with schema' do
      content = <<~SQL
        CREATE OR REPLACE VIEW internal.test AS;
      SQL
      expect { base.validate!(:view, content, 'internal_test') }.not_to raise_error
    end
  end
end
# Exercises CommandMigration#migrate for up/down runs of each command type,
# with file access and validation stubbed out.
context 'on running' do
  let(:base) { Torque::PostgreSQL::VersionedCommands }
  let(:sql) { 'CREATE TYPE test;' }
  let(:command) do
    base::CommandMigration.new('test.sql', 1, 'create', 'type', 'test', 1)
  end

  before do
    allow_any_instance_of(ActiveRecord::Migration).to receive(:puts) # Disable messages
    allow(File).to receive(:expand_path, &:itself)
    allow(File).to receive(:read).with('test.sql').and_return(sql)
    # Validations are better tested above
    allow(base).to receive(:validate!).and_return(true)
  end

  it 'has the right name' do
    expect(command.name).to eq('create_type_test_v1')
  end

  it 'creates the type properly' do
    expect(connection).to receive(:execute).with(sql)
    command.migrate(:up)
  end

  # Rolling back an update re-runs the previous version's file.
  it 'reverts to the previous file' do
    sql2 = 'CREATE TYPE test_v1;'
    command.op_version = 2
    expect(base).to receive(:fetch_command).with(Array, 'type', 'test', 1).and_return(sql2)
    expect(connection).to receive(:execute).with(sql2)
    command.migrate(:down)
  end

  # Rolling back a remove re-creates the same version that was removed.
  it 'reverts to the same version when reverting a remove' do
    command.op = 'remove'
    command.op_version = 2
    expect(base).to receive(:fetch_command).with(Array, 'type', 'test', 2).and_return(sql)
    expect(connection).to receive(:execute).with(sql)
    command.migrate(:down)
  end

  # Function drops must carry each overload's argument signature.
  it 'properly drops functions' do
    command.type = 'function'
    sql.replace('CREATE FUNCTION test;')
    expect(connection).to receive(:execute).with('DROP FUNCTION test;')
    command.migrate(:down)
    sql.replace('CREATE FUNCTION test();')
    expect(connection).to receive(:execute).with('DROP FUNCTION test();')
    command.migrate(:down)
    sql.replace('CREATE FUNCTION test(int); CREATE FUNCTION test(float);')
    expect(connection).to receive(:execute).with('DROP FUNCTION test(int), test(float);')
    command.migrate(:down)
  end

  it 'properly drops types' do
    command.type = 'type'
    sql.replace('CREATE TYPE test;')
    expect(connection).to receive(:execute).with('DROP TYPE test;')
    command.migrate(:down)
  end

  # MATERIALIZED is preserved on drop; RECURSIVE is not part of drop syntax.
  it 'properly drops views' do
    command.type = 'view'
    sql.replace('CREATE VIEW test AS SELECT 1;')
    expect(connection).to receive(:execute).with('DROP VIEW test;')
    command.migrate(:down)
    sql.replace('CREATE MATERIALIZED VIEW test AS SELECT 1;')
    expect(connection).to receive(:execute).with('DROP MATERIALIZED VIEW test;')
    command.migrate(:down)
    sql.replace('CREATE RECURSIVE VIEW test AS SELECT 1;')
    expect(connection).to receive(:execute).with('DROP VIEW test;')
    command.migrate(:down)
  end
end
# Covers the bookkeeping SchemaTable lifecycle: created on the first command
# migration, kept while records remain, and dropped when the last is removed.
context 'on migrator' do
  let(:base) { Torque::PostgreSQL::VersionedCommands }
  let(:table) { base::SchemaTable.new(connection.pool) }
  let(:context) { connection.pool.migration_context }
  let(:versions) { migrations.map(&:version).map(&:to_i) }
  let(:migrations) { [ActiveRecord::Migration.new('base', 1)] }

  before do
    allow_any_instance_of(ActiveRecord::Migration).to receive(:puts) # Disable messages
    allow(File).to receive(:expand_path, &:itself)
    # Validations are better tested above
    allow(base).to receive(:validate!).and_return(true)
    allow(context).to receive(:migrations).and_return(migrations)
    allow(context.schema_migration).to receive(:integer_versions).and_return(versions)
  end

  it 'expect the table to not exist by default' do
    expect(table.table_exists?).to be_falsey
  end

  it 'creates the table on first migration' do
    migration('CREATE TYPE test;')
    expect(table.table_exists?).to be_falsey
    context.up(2)
    expect(table.table_exists?).to be_truthy
    expect(table.count).to eq(1)
    expect(table.versions_of('type')).to eq([['test_2', 1]])
  end

  it 'drops the table if all versions are removed' do
    migrations << ActiveRecord::Migration.new('other', 2)
    versions << 2
    migration('CREATE TYPE test;')
    expect(table.table_exists?).to be_falsey
    context.up(3)
    expect(table.table_exists?).to be_truthy
    expect(table.count).to eq(1)
    versions << 3
    context.down(2)
    expect(table.table_exists?).to be_falsey
    expect(table.count).to eq(0)
  end

  # Fixed typo in the example description ("does no drop" -> "does not drop").
  it 'does not drop the table if there are still records' do
    migration('CREATE TYPE test;')
    migration('CREATE TYPE other;')
    expect(table.table_exists?).to be_falsey
    context.up(3)
    expect(table.table_exists?).to be_truthy
    expect(table.count).to eq(2)
    versions << 2
    versions << 3
    context.down(2)
    expect(table.table_exists?).to be_truthy
    expect(table.count).to eq(1)
  end

  # Helper: register a stubbed CommandMigration backed by an in-memory file.
  def migration(command)
    version = migrations.size + 1
    file = "test_#{version}.sql"
    name = file.split('.').first
    allow(File).to receive(:read).with(file).and_return(command)
    migrations << base::CommandMigration.new(file, version, 'create', 'type', name, 1)
  end
end
end
context 'on schema dumper' do
  let(:source) { ActiveRecord::Base.connection_pool }
  let(:schema_table) { double(commands_table.name) }
  let(:commands_table) { Torque::PostgreSQL::VersionedCommands::SchemaTable }

  # Runs the schema dumper against the current connection and returns the
  # generated schema content as a plain string
  let(:dump_result) do
    ActiveRecord::SchemaDumper.dump(source, (dump_result = StringIO.new))
    dump_result.string
  end

  before do
    allow(commands_table).to receive(:new).and_return(schema_table)
    allow(schema_table).to receive(:versions_of).and_return([])
    allow(schema_table).to receive(:table_name).and_return('versioned_commands_tbl')
  end

  it 'does not include versioned commands info by default' do
    expect(dump_result).not_to include('"versioned_commands_tbl"')
    expect(dump_result).not_to include('# These are types managed by versioned commands')
    expect(dump_result).not_to include('# These are functions managed by versioned commands')
    expect(dump_result).not_to include('# These are views managed by versioned commands')
  end

  it 'includes all types' do
    connection.execute('CREATE TYPE test;')
    connection.execute('CREATE TYPE internal.other;')
    # FIX: the stubbed entry was 'remove', which made the negative assertion
    # on 'removed' below pass vacuously; use 'removed' so the example really
    # checks that registered commands without a database object are skipped
    allow(schema_table).to receive(:versions_of).with('type').and_return([
      ['test', 1],
      ['internal_other', 2],
      ['removed', 1],
    ])
    expect(dump_result).to include('# These are types managed by versioned commands')
    expect(dump_result).to include('create_type "test", version: 1')
    expect(dump_result).to include('create_type "internal_other", version: 2')
    expect(dump_result).not_to include('create_type "removed", version: 1')
  end

  it 'includes all functions' do
    body = 'RETURNS void AS $$ BEGIN NULL; END; $$ LANGUAGE plpgsql'
    connection.execute("CREATE FUNCTION test() #{body};")
    connection.execute("CREATE FUNCTION internal.other() #{body};")
    # FIX: 'remove' -> 'removed' so the negative assertion below is meaningful
    allow(schema_table).to receive(:versions_of).with('function').and_return([
      ['test', 1],
      ['internal_other', 2],
      ['removed', 1],
    ])
    expect(dump_result).to include('# These are functions managed by versioned commands')
    expect(dump_result).to include('create_function "test", version: 1')
    expect(dump_result).to include('create_function "internal_other", version: 2')
    expect(dump_result).not_to include('create_function "removed", version: 1')
  end

  it 'includes all views' do
    connection.execute('CREATE VIEW test AS SELECT 1;')
    connection.execute('CREATE MATERIALIZED VIEW internal.other AS SELECT 2;')
    # FIX: 'remove' -> 'removed' so the negative assertion below is meaningful
    allow(schema_table).to receive(:versions_of).with('view').and_return([
      ['test', 1],
      ['internal_other', 2],
      ['removed', 1],
    ])
    expect(dump_result).to include('# These are views managed by versioned commands')
    expect(dump_result).to include('create_view "test", version: 1')
    expect(dump_result).to include('create_view "internal_other", version: 2')
    expect(dump_result).not_to include('create_view "removed", version: 1')
  end
end
end
================================================
FILE: torque_postgresql.gemspec
================================================
# frozen_string_literal: true

$:.push File.expand_path('../lib', __FILE__)

# Maintain your gem's version:
require 'torque/postgresql/version'
require 'date'

# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
  s.name        = 'torque-postgresql'
  s.version     = Torque::PostgreSQL::VERSION
  # NOTE(review): RubyGems sets the date automatically at build time; this
  # explicit assignment makes builds non-reproducible — consider removing
  s.date        = Date.today.to_s
  s.authors     = ['Carlos Silva']
  s.email       = ['me@carlosfsilva.com']
  s.homepage    = 'https://github.com/crashtech/torque-postgresql'
  s.summary     = 'ActiveRecord extension to access PostgreSQL advanced resources'
  s.description = 'Add support to complex resources of PostgreSQL, like data types, array associations, auxiliary statements (CTE), and full-text search (FTS)'
  s.license     = 'MIT'

  # FIX: quote style consistency — 'source_code_uri' was double-quoted while
  # every other key used single quotes
  s.metadata = {
    'homepage_uri'    => 'https://torque.dev/postgresql',
    'source_code_uri' => 'https://github.com/crashtech/torque-postgresql',
    'bug_tracker_uri' => 'https://github.com/crashtech/torque-postgresql/issues',
    'changelog_uri'   => 'https://github.com/crashtech/torque-postgresql/releases',
  }

  s.require_paths = ['lib']

  s.files = Dir['MIT-LICENSE', 'README.rdoc', 'lib/**/*', 'Rakefile']
  # NOTE(review): Gem::Specification#test_files is deprecated and ignored by
  # modern RubyGems; kept for backward compatibility — confirm before removing
  s.test_files = Dir['spec/**/*']

  s.rdoc_options = ['--title', 'Torque PostgreSQL']

  s.required_ruby_version     = '>= 3.2'
  s.required_rubygems_version = '>= 1.8.11'

  s.add_dependency 'rails', '~> 8.0'
  s.add_dependency 'pg', '>= 1.2'

  s.add_development_dependency 'rake', '~> 13.0'
  s.add_development_dependency 'database_cleaner', '~> 2.0'
  s.add_development_dependency 'dotenv', '~> 3.1'
  s.add_development_dependency 'rspec', '~> 3.5'
  s.add_development_dependency 'factory_bot', '~> 6.2'
  s.add_development_dependency 'faker', '~> 3.5'
end