Repository: arturictus/sidekiq_alive Branch: master Commit: 6da0cfdadf7b Files: 42 Total size: 60.1 KB Directory structure: gitextract_fpunre1x/ ├── .github/ │ ├── dependabot.yml │ ├── release.yml │ └── workflows/ │ ├── changelog.yml │ ├── release.yml │ └── test.yml ├── .gitignore ├── .rspec ├── .rubocop.yml ├── .ruby-version ├── .tool-versions ├── CODE_OF_CONDUCT.md ├── Gemfile ├── LICENSE.txt ├── README.md ├── Rakefile ├── bin/ │ ├── console │ └── setup ├── docker-compose.yml ├── lib/ │ ├── sidekiq_alive/ │ │ ├── config.rb │ │ ├── helpers.rb │ │ ├── redis/ │ │ │ ├── base.rb │ │ │ ├── redis_client_gem.rb │ │ │ └── redis_gem.rb │ │ ├── redis.rb │ │ ├── server/ │ │ │ ├── base.rb │ │ │ ├── default.rb │ │ │ ├── http_server.rb │ │ │ └── rack.rb │ │ ├── server.rb │ │ ├── version.rb │ │ └── worker.rb │ └── sidekiq_alive.rb ├── sidekiq_alive.gemspec ├── spec/ │ ├── config_spec.rb │ ├── redis_spec.rb │ ├── server/ │ │ ├── default_spec.rb │ │ └── rack_spec.rb │ ├── server_spec.rb │ ├── sidekiq_alive_spec.rb │ ├── spec_helper.rb │ └── worker_spec.rb └── tasks/ └── version.rake ================================================ FILE CONTENTS ================================================ ================================================ FILE: .github/dependabot.yml ================================================ version: 2 updates: - package-ecosystem: "bundler" directory: "/" schedule: interval: "daily" reviewers: - "andrcuns" - package-ecosystem: github-actions directory: "/" schedule: interval: "daily" reviewers: - andrcuns labels: - "ci" ================================================ FILE: .github/release.yml ================================================ changelog: categories: - title: '🚀 New feature or request' labels: - 'enhancement' - title: '🐞 Bug Fixes' labels: - 'bug' - title: '📦 Dependency updates' labels: - 'dependencies' - title: '🧰 Maintenance' labels: - 'maintenance' ================================================ FILE: .github/workflows/changelog.yml 
================================================ name: Changelog on: push: tags: - v[0-9]+.[0-9]+.[0-9]+ jobs: release: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 - name: Create GitHub release uses: softprops/action-gh-release@v2 with: token: ${{ secrets.GITHUB_TOKEN }} generate_release_notes: true ================================================ FILE: .github/workflows/release.yml ================================================ name: Release on: workflow_dispatch: inputs: semver: description: Bump required: true type: choice options: - major - minor - patch jobs: release: name: Ruby gem runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 with: fetch-depth: 0 token: ${{ secrets.RELEASE_GITHUB_TOKEN }} - name: Set up Ruby 3.3 uses: ruby/setup-ruby@v1 with: bundler-cache: true - name: Update version run: | git config user.name github-actions git config user.email github-actions@github.com bundle config unset deployment bundle exec rake "version[${{ inputs.semver }}]" && git push - name: Create tag and push to rubygems run: bundle exec rake release env: GEM_HOST_API_KEY: ${{ secrets.GEM_HOST_API_KEY }} ================================================ FILE: .github/workflows/test.yml ================================================ name: Test on: push: branches: - master pull_request: branches: - master jobs: rubocop: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Set up Ruby uses: ruby/setup-ruby@v1 with: bundler-cache: true cache-version: 1 - name: Run lint run: bundle exec rubocop --color test: runs-on: ubuntu-latest needs: rubocop strategy: fail-fast: false matrix: ruby-version: ["3.4", "3.3", "3.2"] sidekiq-version: ["~> 6.5", "~> 7", "~> 8"] exclude: - sidekiq-version: "~> 8" ruby-version: "3.1" # Service containers to run with `runner-job` services: # Label used to access the service container redis: # Docker Hub image image: redis # Set health checks to wait until redis has started options: 
>- --health-cmd "redis-cli ping" --health-interval 10s --health-timeout 5s --health-retries 5 ports: # Maps port 6379 on service container to the host - 6379:6379 steps: - uses: actions/checkout@v4 - name: Set up Ruby ${{ matrix.ruby-version }} with Sidekiq ${{ matrix.sidekiq-version }} uses: ruby/setup-ruby@v1 with: ruby-version: ${{ matrix.ruby-version }} bundler-cache: true cache-version: 1 env: SIDEKIQ_VERSION_RANGE: ${{ matrix.sidekiq-version }} - name: Run tests run: bundle exec rspec --force-color env: SIDEKIQ_VERSION_RANGE: ${{ matrix.sidekiq-version }} - name: Add coverage report uses: insightsengineering/coverage-action@v2 # TODO: Add coverage merging from different test runs if: ${{ matrix.ruby-version == '3.4' && matrix.sidekiq-version == '~> 7' }} with: path: coverage/coverage.xml publish: true threshold: 90 pycobertura-exception-failure: false diff: true diff-branch: master coverage-reduction-failure: true ================================================ FILE: .gitignore ================================================ /.bundle/ /.yardoc /_yardoc/ /coverage/ /doc/ /pkg/ /spec/reports/ /tmp/ Gemfile.lock # rspec failure tracking .rspec_status vendor ================================================ FILE: .rspec ================================================ --format documentation --color --require spec_helper --order random ================================================ FILE: .rubocop.yml ================================================ inherit_gem: rubocop-shopify: rubocop.yml AllCops: TargetRubyVersion: 3.1 SuggestExtensions: false NewCops: enable Layout/ArgumentAlignment: Enabled: true EnforcedStyle: with_first_argument Style/InvertibleUnlessCondition: Exclude: - "lib/sidekiq_alive.rb" ================================================ FILE: .ruby-version ================================================ 3.4.4 ================================================ FILE: .tool-versions ================================================ ruby 3.4.4 
================================================ FILE: CODE_OF_CONDUCT.md ================================================ # Contributor Covenant Code of Conduct ## Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. ## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 
## Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at arturictus@gmail.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 
## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] [homepage]: http://contributor-covenant.org [version]: http://contributor-covenant.org/version/1/4/ ================================================ FILE: Gemfile ================================================ # frozen_string_literal: true source "https://rubygems.org" git_source(:github) { |repo_name| "https://github.com/#{repo_name}" } # Specify your gem's dependencies in sidekiq_alive.gemspec gemspec gem "sidekiq", ENV["SIDEKIQ_VERSION_RANGE"] || "< 9" gem "ruby-lsp", "~> 0.23.11", group: :development group :test do gem "simplecov", require: false gem "simplecov-cobertura", require: false # used for testing rack based server gem "rack-test", "~> 2.2.0" # rackup is not compatible with sidekiq < 7 due to rack version requirement if ["7", "8"].any? { |range| ENV["SIDEKIQ_VERSION_RANGE"]&.include?(range) } gem "rackup", "~> 2.2.0" else gem "rack", "< 3" gem "webrick", "< 2" end end ================================================ FILE: LICENSE.txt ================================================ The MIT License (MIT) Copyright (c) 2018 Artur Pañach Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: README.md ================================================ # SidekiqAlive [![Gem Version](https://badge.fury.io/rb/sidekiq_alive.svg)](https://rubygems.org/gems/sidekiq_alive) [![Total Downloads](https://img.shields.io/gem/dt/sidekiq_alive?color=blue)](https://rubygems.org/gems/https://rubygems.org/gems/sidekiq_alive) ![Workflow status](https://github.com/allure-framework/allure-ruby/workflows/Test/badge.svg) --- SidekiqAlive offers a solution to add liveness probe for a Sidekiq instance deployed in Kubernetes. This library can be used to check sidekiq health outside kubernetes. **How?** A http server is started and on each requests validates that a liveness key is stored in Redis. If it is there means is working. A Sidekiq worker is the responsible to storing this key. If Sidekiq stops processing workers this key gets expired by Redis an consequently the http server will return a 500 error. This worker is responsible to requeue itself for the next liveness probe. Each instance in kubernetes will be checked based on `ENV` variable `HOSTNAME` (kubernetes sets this for each replica/pod). On initialization SidekiqAlive will asign to Sidekiq::Worker a queue with the current host and add this queue to the current instance queues to process. 
example: ``` hostname: foo Worker queue: sidekiq_alive-foo instance queues: - sidekiq_alive-foo *- your queues hostname: bar Worker queue: sidekiq_alive-bar instance queues: - sidekiq_alive-bar *- your queues ``` ## Installation Add this line to your application's Gemfile: ```ruby gem 'sidekiq_alive' ``` And then execute: $ bundle Or install it yourself as: $ gem install sidekiq_alive ## Usage SidekiqAlive will start when running `sidekiq` command. Run `Sidekiq` ``` bundle exec sidekiq ``` ``` curl localhost:7433 #=> Alive! ``` **how to disable?** You can disabled by setting `ENV` variable `DISABLE_SIDEKIQ_ALIVE` example: ``` DISABLE_SIDEKIQ_ALIVE=true bundle exec sidekiq ``` ### Kubernetes setup Set `livenessProbe` in your Kubernetes deployment example with recommended setup: #### Sidekiq < 6 ```yaml spec: containers: - name: my_app image: my_app:latest env: - name: RAILS_ENV value: production command: - bundle - exec - sidekiq ports: - containerPort: 7433 livenessProbe: httpGet: path: / port: 7433 initialDelaySeconds: 80 # app specific. Time your sidekiq takes to start processing. timeoutSeconds: 5 # can be much less readinessProbe: httpGet: path: / port: 7433 initialDelaySeconds: 80 # app specific timeoutSeconds: 5 # can be much less lifecycle: preStop: exec: # SIGTERM triggers a quick exit; gracefully terminate instead command: ['bundle', 'exec', 'sidekiqctl', 'quiet'] terminationGracePeriodSeconds: 60 # put your longest Job time here plus security time. ``` #### Sidekiq >= 6 Create file: _kube/sidekiq_quiet_ ```bash #!/bin/bash # Find Pid SIDEKIQ_PID=$(ps aux | grep sidekiq | grep busy | awk '{ print $2 }') # Send TSTP signal. 
Note: Alpine Linux needs to use `kill -s SIGTSTP` instead of `kill -TSTP` kill -SIGTSTP $SIDEKIQ_PID ``` Make it executable: ``` $ chmod +x kube/sidekiq_quiet ``` Execute it in your deployment preStop: ```yaml spec: containers: - name: my_app image: my_app:latest env: - name: RAILS_ENV value: production command: - bundle - exec - sidekiq ports: - containerPort: 7433 livenessProbe: httpGet: path: / port: 7433 initialDelaySeconds: 80 # app specific. Time your sidekiq takes to start processing. timeoutSeconds: 5 # can be much less readinessProbe: httpGet: path: / port: 7433 initialDelaySeconds: 80 # app specific timeoutSeconds: 5 # can be much less lifecycle: preStop: exec: # SIGTERM triggers a quick exit; gracefully terminate instead command: ['kube/sidekiq_quiet'] terminationGracePeriodSeconds: 60 # put your longest Job time here plus security time. ``` ### Outside kubernetes It's just up to you how you want to use it. An example in local would be: ``` bundle exec sidekiq # let it initialize ... ``` ``` curl localhost:7433 #=> Alive! ``` ## Options ```ruby SidekiqAlive.setup do |config| # ==> Server host # Host to bind the server. # Can also be set with the environment variable SIDEKIQ_ALIVE_HOST. # default: 0.0.0.0 # # config.host = 0.0.0.0 # ==> Server port # Port to bind the server. # Can also be set with the environment variable SIDEKIQ_ALIVE_PORT. # default: 7433 # # config.port = 7433 # ==> Server path # HTTP path to respond to. # Can also be set with the environment variable SIDEKIQ_ALIVE_PATH. # default: '/' # # config.path = '/' # ==> Custom Liveness Probe # Extra check to decide if restart the pod or not for example connection to DB. # `false`, `nil` or `raise` will not write the liveness probe # default: proc { true } # # config.custom_liveness_probe = proc { db_running? 
} # ==> Liveness key # Key to be stored in Redis as probe of liveness # default: "SIDEKIQ::LIVENESS_PROBE_TIMESTAMP" # # config.liveness_key = "SIDEKIQ::LIVENESS_PROBE_TIMESTAMP" # ==> Time to live # Time for the key to be kept by Redis. # Here is where you can set de periodicity that the Sidekiq has to probe it is working # Time unit: seconds # default: 10 * 60 # 10 minutes # # config.time_to_live = 10 * 60 # ==> Callback # After the key is stored in redis you can perform anything. # For example a webhook or email to notify the team # default: proc {} # # require 'net/http' # config.callback = proc { Net::HTTP.get("https://status.com/ping") } # ==> Shutdown callback # When sidekiq process is shutting down, you can perform some arbitrary action. # default: proc {} # # config.shutdown_callback = proc { puts "Sidekiq is shutting down" } # ==> Queue Prefix # SidekiqAlive will run in a independent queue for each instance/replica # This queue name will be generated with: "#{queue_prefix}-#{hostname}. # You can customize the prefix here. # default: :sidekiq-alive # # config.queue_prefix = :other # ==> Concurrency # The maximum number of Redis connections requested for the SidekiqAlive pool. # Can also be set with the environment variable SIDEKIQ_ALIVE_CONCURRENCY. # NOTE: only effects Sidekiq 7 or greater. # default: 2 # # config.concurrency = 3 # ==> Rack server # Web server used to serve an HTTP response. By default simple GServer based http server is used. # To use specific server, rack gem version > 2 is required. For rack version >= 3, rackup gem is required. # Can also be set with the environment variable SIDEKIQ_ALIVE_SERVER. # default: nil # # config.server = 'puma' # ==> Quiet mode timeout in seconds # When sidekiq is shutting down, the Sidekiq process stops pulling jobs from the queue. This includes alive key update job. In case of # long running jobs, alive key can expire before the job is finished. 
To avoid this, web server is set in to quiet mode # and is returning 200 OK for healthcheck requests. To avoid infinite quiet mode in case sidekiq process is stuck in shutdown, # timeout can be set. After timeout is reached, web server resumes normal operations and will return unhealthy status in case # alive key is expired or purged from redis. # default: 180 # # config.quiet_timeout = 300 end ``` ## Development After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment. To install this gem onto your local machine, run `bundle exec rake install`. Here is an example [rails app](https://github.com/arturictus/sidekiq_alive_example) ## Contributing Bug reports and pull requests are welcome on GitHub at https://github.com/arturictus/sidekiq_alive. This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct. ## License The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT). ================================================ FILE: Rakefile ================================================ # frozen_string_literal: true require "bundler/gem_tasks" require "rspec/core/rake_task" RSpec::Core::RakeTask.new(:spec) task default: :spec load "tasks/version.rake" SidekiqAlive::VersionTask.new ================================================ FILE: bin/console ================================================ #!/usr/bin/env ruby # frozen_string_literal: true require "bundler/setup" require "sidekiq_alive" # You can add fixtures and/or initialization code here to make experimenting # with your gem easier. You can also use a different console, if you like. # (If you use this, don't forget to add pry to your Gemfile!) 
# require "pry" # Pry.start require "irb" IRB.start(__FILE__) ================================================ FILE: bin/setup ================================================ #!/usr/bin/env bash set -euo pipefail IFS=$'\n\t' set -vx bundle install # Do any other automated setup that you need to do here ================================================ FILE: docker-compose.yml ================================================ version: '3.6' services: redis: image: redis ports: - 6379:6379 ================================================ FILE: lib/sidekiq_alive/config.rb ================================================ # frozen_string_literal: true module SidekiqAlive class Config include Singleton attr_accessor :host, :port, :path, :liveness_key, :time_to_live, :callback, :registered_instance_key, :queue_prefix, :custom_liveness_probe, :logger, :shutdown_callback, :concurrency, :server, :quiet_timeout def initialize set_defaults end def set_defaults @host = ENV.fetch("SIDEKIQ_ALIVE_HOST", "0.0.0.0") @port = ENV.fetch("SIDEKIQ_ALIVE_PORT", 7433) @path = ENV.fetch("SIDEKIQ_ALIVE_PATH", "/") @liveness_key = "SIDEKIQ::LIVENESS_PROBE_TIMESTAMP" @time_to_live = 10 * 60 @callback = proc {} @registered_instance_key = "SIDEKIQ_REGISTERED_INSTANCE" @queue_prefix = :"sidekiq-alive" @custom_liveness_probe = proc { true } @shutdown_callback = proc {} @concurrency = Integer(ENV.fetch("SIDEKIQ_ALIVE_CONCURRENCY", 2), exception: false) || 2 @server = ENV.fetch("SIDEKIQ_ALIVE_SERVER", nil) @quiet_timeout = Integer(ENV.fetch("SIDEKIQ_ALIVE_QUIET_TIMEOUT", 180), exception: false) || 180 end def registration_ttl @registration_ttl || time_to_live * 3 end def worker_interval time_to_live / 2 end end end ================================================ FILE: lib/sidekiq_alive/helpers.rb ================================================ # frozen_string_literal: true module SidekiqAlive module Helpers class << self def sidekiq_7? 
current_sidekiq_version >= Gem::Version.new("7") end def sidekiq_6? current_sidekiq_version >= Gem::Version.new("6") && current_sidekiq_version < Gem::Version.new("7") end def sidekiq_5? current_sidekiq_version >= Gem::Version.new("5") && current_sidekiq_version < Gem::Version.new("6") end def use_rack? return @use_rack if defined?(@use_rack) require "rack" @use_rack = current_rack_version < Gem::Version.new("3") rescue LoadError # currently this won't happen because rack is a dependency of sidekiq @use_rack = false end def use_rackup? return @use_rackup if defined?(@use_rackup) require "rackup" @use_rackup = current_rack_version >= Gem::Version.new("3") rescue LoadError if current_rack_version >= Gem::Version.new("3") SidekiqAlive.logger.warn("rackup gem required with rack >= 3, defaulting to default server") end @use_rackup = false end private def current_sidekiq_version Gem.loaded_specs["sidekiq"].version end def current_rack_version Gem.loaded_specs["rack"].version end end end end ================================================ FILE: lib/sidekiq_alive/redis/base.rb ================================================ # frozen_string_literal: true module SidekiqAlive module Redis class Base def set(...) raise(NotImplementedError) end def zadd(set_key, ex, key) raise(NotImplementedError) end def zrange(set_key, start, stop) raise(NotImplementedError) end def zrangebyscore(set_key, min, max) raise(NotImplementedError) end def zrem(set_key, key) raise(NotImplementedError) end def delete(key) raise(NotImplementedError) end def ttl(...) redis { |r| r.ttl(...) 
} end end end end ================================================ FILE: lib/sidekiq_alive/redis/redis_client_gem.rb ================================================ # frozen_string_literal: true require_relative "base" module SidekiqAlive module Redis # Wrapper for `redis-client` gem used by `sidekiq` > 7 # https://github.com/redis-rb/redis-client class RedisClientGem < Base def initialize(capsule = nil) super() @capsule = Sidekiq.default_configuration.capsules[capsule || CAPSULE_NAME] end def set(key, time:, ex:) redis { |r| r.call("SET", key, time, ex: ex) } end def get(key) redis { |r| r.call("GET", key) } end def zadd(set_key, ex, key) redis { |r| r.call("ZADD", set_key, ex, key) } end def zrange(set_key, start, stop) redis { |r| r.call("ZRANGE", set_key, start, stop) } end def zrangebyscore(set_key, min, max) redis { |r| r.call("ZRANGEBYSCORE", set_key, min, max) } end def zrem(set_key, key) redis { |r| r.call("ZREM", set_key, key) } end def delete(key) redis { |r| r.call("DEL", key) } end private def redis(&block) # Default to Sidekiq.redis if capsule is not configured yet but redis adapter is accessed (@capsule || Sidekiq).redis(&block) end end end end ================================================ FILE: lib/sidekiq_alive/redis/redis_gem.rb ================================================ # frozen_string_literal: true require_relative "base" module SidekiqAlive module Redis # Wrapper for `redis` gem used by sidekiq < 7 # https://github.com/redis/redis-rb class RedisGem < Base def set(key, time:, ex:) redis { |r| r.set(key, time, ex: ex) } end def get(key) redis { |r| r.get(key) } end def zadd(set_key, ex, key) redis { |r| r.zadd(set_key, ex, key) } end def zrange(set_key, start, stop) redis { |r| r.zrange(set_key, start, stop) } end def zrangebyscore(set_key, min, max) redis { |r| r.zrangebyscore(set_key, min, max) } end def zrem(set_key, key) redis { |r| r.zrem(set_key, key) } end def delete(key) redis { |r| r.del(key) } end private def redis(&block) 
Sidekiq.redis(&block) end end end end ================================================ FILE: lib/sidekiq_alive/redis.rb ================================================ # frozen_string_literal: true module SidekiqAlive module Redis class << self def adapter(capsule = nil) Helpers.sidekiq_7? ? Redis::RedisClientGem.new(capsule) : Redis::RedisGem.new end end end end require_relative "redis/base" require_relative "redis/redis_client_gem" require_relative "redis/redis_gem" ================================================ FILE: lib/sidekiq_alive/server/base.rb ================================================ # frozen_string_literal: true module SidekiqAlive module Server module Base SHUTDOWN_SIGNAL = "TERM" QUIET_SIGNAL = "TSTP" # set web server to quiet mode def quiet! logger.info("[SidekiqAlive] Setting web server to quiet mode") Process.kill(QUIET_SIGNAL, @server_pid) unless @server_pid.nil? end private def configure_shutdown Kernel.at_exit do next if @server_pid.nil? logger.info("Shutting down SidekiqAlive web server") Process.kill(SHUTDOWN_SIGNAL, @server_pid) Process.wait(@server_pid) end end def configure_shutdown_signal(&block) Signal.trap(SHUTDOWN_SIGNAL, &block) end def configure_quiet_signal(&block) Signal.trap(QUIET_SIGNAL, &block) end def host SidekiqAlive.config.host end def port SidekiqAlive.config.port.to_i end def path SidekiqAlive.config.path end def logger SidekiqAlive.logger end end end end ================================================ FILE: lib/sidekiq_alive/server/default.rb ================================================ # frozen_string_literal: true require_relative "http_server" require_relative "base" module SidekiqAlive module Server class Default < HttpServer extend Base class << self def run! 
logger.info("[SidekiqAlive] Starting default healthcheck server on #{host}:#{port}") @server_pid = ::Process.fork do @server = new(port, host, path) # stop is wrapped in a thread because gserver calls synchrnonize which raises an error when in trap context configure_shutdown_signal { Thread.new { @server.stop } } configure_quiet_signal { @server.quiet! } @server.start @server.join end configure_shutdown logger.info("[SidekiqAlive] Web server started in subprocess with pid #{@server_pid}") self end end def initialize(port, host, path, logger = SidekiqAlive.logger) super(self, port, host, logger) @path = path end def request_handler(req, res) if req.path != path res.status = 404 res.body = "Not found" return logger.warn("[SidekiqAlive] Path '#{req.path}' not found") end if quiet? res.status = 200 res.body = "Server is shutting down" return logger.debug("[SidekiqAlive] Server in quiet mode, skipping alive key lookup!") end if SidekiqAlive.alive? res.status = 200 res.body = "Alive!" return logger.debug("[SidekiqAlive] Found alive key!") end response = "Can't find the alive key" res.status = 404 res.body = response logger.error("[SidekiqAlive] #{response}") rescue StandardError => e response = "Internal Server Error" res.status = 500 res.body = response logger.error("[SidekiqAlive] #{response} looking for alive key. Error: #{e.message}") end def quiet! @quiet = Time.now end private attr_reader :path def quiet? 
# Minimal value object representing a parsed inbound HTTP request.
#
# +data+ is the raw IO/body source, +method+/+path+/+proto+ come from the
# request line, and +header+ is a mutable Hash populated by the server while
# it parses header lines.
class Request
  attr_reader :data, :header, :method, :path, :proto

  def initialize(data, method = nil, path = nil, proto = nil)
    @data = data
    @method = method
    @path = path
    @proto = proto
    @header = {}
  end

  # Value of the Content-Length header as an Integer, or nil when the
  # header is absent.
  def content_length
    @header["Content-Length"]&.to_i
  end
end
# Build the header hash for an outgoing HTTP response.
#
# Starts from DEFAULT_HEADER, merges in any handler-supplied headers, then
# stamps the Connection and Date entries.
#
# @param header [Hash, nil] extra headers set by the request handler
# @return [Hash] complete header set for the response
def http_header(header = nil)
  new_header = DEFAULT_HEADER.dup
  # BUGFIX: use merge! — plain `merge` returned a new hash that was
  # immediately discarded, silently dropping all handler-supplied headers.
  new_header.merge!(header) unless header.nil?

  new_header["Connection"] = "Keep-Alive"
  new_header["Date"] = http_date(Time.now)

  new_header
end
module SidekiqAlive
  module Server
    # Rack-based healthcheck server. Used when a rack handler (e.g. webrick,
    # puma) is configured; runs the handler in a forked subprocess and serves
    # the liveness probe via the standard rack interface.
    class Rack
      extend Base

      class << self
        # Fork a subprocess that runs the configured rack handler with this
        # class as the rack app, then register the parent-side shutdown hook.
        #
        # @return [Class] self
        def run!
          logger.info("[SidekiqAlive] Starting healthcheck '#{server}' server")

          @server_pid = ::Process.fork do
            @handler = handler
            configure_shutdown_signal { @handler.shutdown }
            configure_quiet_signal { @quiet = Time.now }

            @handler.run(self, Port: port, Host: host, AccessLog: [], Logger: logger)
          end

          configure_shutdown
          self
        end

        # Rack entrypoint.
        #
        # @param env [Hash] rack environment
        # @return [Array(Integer, Hash, Array<String>)] rack response triplet
        def call(env)
          req = ::Rack::Request.new(env)

          if req.path != path
            logger.warn("[SidekiqAlive] Path '#{req.path}' not found")
            return [404, {}, ["Not found"]]
          end

          if quiet?
            # BUGFIX: log tag was duplicated ("[SidekiqAlive] [SidekiqAlive]")
            logger.debug("[SidekiqAlive] Server in quiet mode, skipping alive key lookup!")
            return [200, {}, ["Server is shutting down"]]
          end

          if SidekiqAlive.alive?
            logger.debug("[SidekiqAlive] Found alive key!")
            return [200, {}, ["Alive!"]]
          end

          response = "Can't find the alive key"
          logger.error("[SidekiqAlive] #{response}")
          [404, {}, [response]]
        rescue StandardError => e
          # BUGFIX: previously interpolated the `response` local here, which is
          # nil when the error is raised before it is assigned (e.g. inside
          # SidekiqAlive.alive?), producing a mangled log line.
          logger.error("[SidekiqAlive] Internal Server Error looking for alive key. Error: #{e.message}")
          [500, {}, ["Internal Server Error"]]
        end

        private

        # Quiet mode is active for config.quiet_timeout seconds after the
        # quiet signal was received.
        def quiet?
          @quiet && (Time.now - @quiet) < SidekiqAlive.config.quiet_timeout
        end

        # Resolve the rack handler class; rack 3 moved handlers to rackup.
        def handler
          Helpers.use_rackup? ? ::Rackup::Handler.get(server) : ::Rack::Handler.get(server)
        end

        def server
          SidekiqAlive.config.server
        end
      end
    end
  end
end
# Removes orphaned Sidekiq queues left behind by unexpected instance
# shutdowns (e.g., due to OOM).
#
# A queue is considered orphaned when it carries this gem's queue prefix,
# its latency exceeds the window in which the worker should have run, and
# its only pending job is a SidekiqAlive::Worker.
def remove_orphaned_queues
  # If the worker isn't executed within this window, the liveness key expires
  stale_after = config.time_to_live - config.worker_interval

  orphaned = Sidekiq::Queue.all.select do |queue|
    queue.name.start_with?(config.queue_prefix.to_s) &&
      queue.latency > stale_after &&
      queue.size == 1 &&
      queue.all? { |job| job.klass == self.class.name }
  end

  orphaned.each(&:clear)
end
# Delete any scheduled SidekiqAlive::Worker jobs targeting this instance's
# queue, so no stale self-requeued probes remain after quiet/shutdown.
def purge_pending_jobs
  scheduled = Sidekiq::ScheduledSet.new

  # sidekiq 5 lacks ScheduledSet#scan, so fall back to a full select there
  pending =
    if Helpers.sidekiq_5?
      scheduled.select { |job| job.klass == "SidekiqAlive::Worker" && job.queue == current_queue }
    else
      scheduled.scan('"class":"SidekiqAlive::Worker"').select { |job| job.queue == current_queue }
    end

  return if pending.empty?

  logger.info("[SidekiqAlive] Purging #{pending.count} pending jobs for #{hostname}")
  pending.each(&:delete)
end
# Drop registry entries whose expiration score has already passed.
# Scores in HOSTNAME_REGISTRY are unix timestamps, so every member scored
# at or below "now" is stale and gets removed.
def expire_old_keys
  redis.zrangebyscore(HOSTNAME_REGISTRY, 0, Time.now.to_i).each do |stale_key|
    redis.zrem(HOSTNAME_REGISTRY, stale_key)
  end
end
If Sidekiq stops processing jobs this key gets expired by Redis an consequently the http server will return a 500 error. This Job is responsible to requeue itself for the next liveness probe. DSC spec.metadata = { "homepage_uri" => spec.homepage, "source_code_uri" => spec.homepage, "changelog_uri" => "#{spec.homepage}/releases", "documentation_uri" => "#{spec.homepage}/blob/v#{spec.version}/README.md", "bug_tracker_uri" => "#{spec.homepage}/issues", } spec.files = Dir["README.md", "lib/**/*"] spec.require_paths = ["lib"] spec.add_development_dependency("bundler", "> 1.16") spec.add_development_dependency("debug", "~> 1.6") spec.add_development_dependency("rake", "~> 13.0") spec.add_development_dependency("rspec", "~> 3.0") spec.add_development_dependency("rspec-sidekiq", "~> 5.0") spec.add_development_dependency("rubocop-shopify", "~> 2.10") spec.add_development_dependency("semver2", "~> 3.4") spec.add_development_dependency("solargraph", "~> 0.54.0") spec.add_dependency("base64", ">= 0", "< 1") # sidekiq 6 requires base64 which is not part of stdlib in Ruby 3.4+ spec.add_dependency("gserver", "~> 0.0.1") spec.add_dependency("sidekiq", ">= 5", "< 9") end ================================================ FILE: spec/config_spec.rb ================================================ # frozen_string_literal: true RSpec.describe(SidekiqAlive::Config) do subject(:config) { described_class.instance } describe "#worker_interval" do it "less than ttl" do expect(config.worker_interval).to(satisfy { |i| i < config.time_to_live }) end end end ================================================ FILE: spec/redis_spec.rb ================================================ # frozen_string_literal: true RSpec.describe(SidekiqAlive::Redis) do let(:redis) { SidekiqAlive::Redis.adapter } it "Works" do time = Time.now.to_s redis.set("hello", time: time, ex: 60) expect(redis.ttl("hello") > 1).to(be(true)) expect(redis.get("hello")).to(eq(time)) redis.zadd("test_set", Time.now.to_i, "test-key-1") 
redis.zadd("test_set", Time.now.to_i, "test-key-2") expect(redis.zrange("test_set", 0, -1)).to(eq(["test-key-1", "test-key-2"])) expect(redis.zrem("test_set", "test-key-1")) expect(redis.zrange("test_set", 0, -1)).to(eq(["test-key-2"])) end end ================================================ FILE: spec/server/default_spec.rb ================================================ # frozen_string_literal: true require "net/http" RSpec.describe(SidekiqAlive::Server::Default) do let(:port) { 7433 } let(:path) { "/" } let(:server) { SidekiqAlive::Server::Default.new(port, "0.0.0.0", path) } before do server.start end after do server.stop end def get(uri) @last_response = Net::HTTP.get_response(URI("http://localhost:#{port}#{uri}")) end context "with default configuration" do it "responds with success when the service is alive" do allow(SidekiqAlive).to(receive(:alive?) { true }) get "/" expect(@last_response.code).to(eq("200")) expect(@last_response.body).to(eq("Alive!")) end it "responds with an error when the service is not alive" do allow(SidekiqAlive).to(receive(:alive?) { false }) get "/" expect(@last_response.code).to(eq("404")) expect(@last_response.body).to(eq("Can't find the alive key")) end it "responds not found on an unknown path" do get "/unknown-path" expect(@last_response.code).to(eq("404")) expect(@last_response.body).to(eq("Not found")) end end context "with custom path" do let(:path) { "/sidekiq-probe" } it "responds ok to the given path" do allow(SidekiqAlive).to(receive(:alive?) { true }) get "/sidekiq-probe" expect(@last_response.code).to(eq("200")) end end context "with quiet mode" do before do server.quiet! 
end it "responds with success and server is shutting down message" do get "/" expect(@last_response.code).to(eq("200")) expect(@last_response.body).to(eq("Server is shutting down")) end end end ================================================ FILE: spec/server/rack_spec.rb ================================================ # frozen_string_literal: true require "rack/test" ENV["RACK_ENV"] = "test" RSpec.describe(SidekiqAlive::Server::Rack) do include Rack::Test::Methods subject(:app) { described_class } before do described_class.instance_variable_set(:@quiet, nil) end context "with default configuration" do it "responds with success when the service is alive" do allow(SidekiqAlive).to(receive(:alive?) { true }) get "/" expect(last_response).to(be_ok) expect(last_response.body).to(eq("Alive!")) end it "responds with an error when the service is not alive" do allow(SidekiqAlive).to(receive(:alive?) { false }) get "/" expect(last_response).not_to(be_ok) expect(last_response.body).to(eq("Can't find the alive key")) end it "responds not found on an unknown path" do get "/unknown-path" expect(last_response).not_to(be_ok) expect(last_response.body).to(eq("Not found")) end end context "with custom path" do let(:path) { "/sidekiq-probe" } before do ENV["SIDEKIQ_ALIVE_PATH"] = path SidekiqAlive.config.set_defaults end after do ENV["SIDEKIQ_ALIVE_PATH"] = nil end it "responds ok to the given path" do allow(SidekiqAlive).to(receive(:alive?) 
{ true }) get "/sidekiq-probe" expect(last_response).to(be_ok) end end context "with quiet mode" do before do described_class.instance_variable_set(:@quiet, Time.now) end it "responds with success and server is shutting down message" do get "/" expect(last_response).to(be_ok) expect(last_response.body).to(eq("Server is shutting down")) end end end ================================================ FILE: spec/server_spec.rb ================================================ # frozen_string_literal: true around_config = proc do |example| ENV["SIDEKIQ_ALIVE_HOST"] = "1.2.3.4" ENV["SIDEKIQ_ALIVE_PORT"] = "4567" ENV["SIDEKIQ_ALIVE_PATH"] = "/health" SidekiqAlive.config.set_defaults example.run ENV["SIDEKIQ_ALIVE_HOST"] = nil ENV["SIDEKIQ_ALIVE_PORT"] = nil ENV["SIDEKIQ_ALIVE_PATH"] = nil end RSpec.describe(SidekiqAlive::Server, :aggregate_failures) do subject(:app) { described_class } let(:pid) { Random.rand(1000) } before do allow(Process).to(receive(:fork).and_yield.and_return(pid)) allow(Process).to(receive(:kill)) allow(Process).to(receive(:wait)) allow(Signal).to(receive(:trap)) allow(Kernel).to(receive(:at_exit)) end context "with default server" do let(:fake_server) do instance_double( SidekiqAlive::Server::Default, start: nil, stop: nil, join: nil, quiet!: nil, ) end before do allow(SidekiqAlive::Server::Default).to(receive(:new).and_return(fake_server)) allow(Thread).to(receive(:new).and_yield) app.run! 
end context "with default config" do it "starts server with default arguments and configures lifecycle" do expect(SidekiqAlive::Server::Default).to(have_received(:new).with(7433, "0.0.0.0", "/")) expect(fake_server).to(have_received(:start)) expect(fake_server).to(have_received(:join)) end it "configures signals" do expect(Signal).to(have_received(:trap).with("TERM")) do |&arg| arg.call expect(fake_server).to(have_received(:stop)) end expect(Signal).to(have_received(:trap).with("TSTP")) do |&arg| arg.call expect(fake_server).to(have_received(:quiet!)) end end it "configures shutdown" do allow(Kernel).to(receive(:at_exit)) do |&arg| arg.call expect(Process).to(have_received(:kill).with("TERM", pid)) expect(Process).to(have_received(:wait).with(pid)) end end end context "with changed host, port and path configuration" do around(&around_config) it "starts with updated configuration" do expect(SidekiqAlive::Server::Default).to(have_received(:new).with(4567, "1.2.3.4", "/health")) end end end context "rack based server" do let(:fake_server) { double("rack server", run: nil, shutdown: nil) } let(:handler) { SidekiqAlive::Helpers.use_rackup? ? Rackup::Handler : Rack::Handler } before do ENV["SIDEKIQ_ALIVE_SERVER"] = "webrick" SidekiqAlive.config.set_defaults allow(handler).to(receive(:get).and_return(fake_server)) SidekiqAlive::Server::Rack.instance_variable_set(:@quiet, nil) app.run! 
end after { ENV["SIDEKIQ_ALIVE_SERVER"] = nil } context "with default config" do it "starts server with default arguments and traps shutdown", :aggregate_failures do expect(handler).to(have_received(:get).with("webrick")) expect(fake_server).to(have_received(:run).with( SidekiqAlive::Server::Rack, Port: 7433, Host: "0.0.0.0", AccessLog: [], Logger: SidekiqAlive.logger )) end it "configures signals" do expect(Signal).to(have_received(:trap).with("TERM")) do |&arg| arg.call expect(fake_server).to(have_received(:shutdown)) end expect(Signal).to(have_received(:trap).with("TSTP")) do |&arg| arg.call expect(SidekiqAlive::Server::Rack.instance_variable_get(:@quiet)).to(be_instance_of(Time)) end end it "configures shutdown" do allow(Kernel).to(receive(:at_exit)) do |&arg| arg.call expect(Process).to(have_received(:kill).with("TERM", pid)) expect(Process).to(have_received(:wait).with(pid)) end end end context "with changed host, port and path configuration" do around(&around_config) it "starts with updated configuration" do expect(fake_server).to(have_received(:run).with( SidekiqAlive::Server::Rack, Port: 4567, Host: "1.2.3.4", AccessLog: [], Logger: SidekiqAlive.logger )) end end end end ================================================ FILE: spec/sidekiq_alive_spec.rb ================================================ # frozen_string_literal: true begin # this is needed for spec to work with sidekiq >7 require "sidekiq/capsule" rescue LoadError # rubocop:disable Lint/SuppressedException end RSpec.describe(SidekiqAlive) do context "with configuration" do it "has a version number" do expect(SidekiqAlive::VERSION).not_to(be(nil)) end it "configures the host from the #setup" do described_class.setup do |config| config.host = "1.2.3.4" end expect(described_class.config.host).to(eq("1.2.3.4")) end it "configures the host from the SIDEKIQ_ALIVE_HOST ENV var" do ENV["SIDEKIQ_ALIVE_HOST"] = "1.2.3.4" SidekiqAlive.config.set_defaults 
expect(described_class.config.host).to(eq("1.2.3.4")) ENV["SIDEKIQ_ALIVE_HOST"] = nil end it "configures the port from the #setup" do described_class.setup do |config| config.port = 4567 end expect(described_class.config.port).to(eq(4567)) end it "configures the port from the SIDEKIQ_ALIVE_PORT ENV var" do ENV["SIDEKIQ_ALIVE_PORT"] = "4567" SidekiqAlive.config.set_defaults expect(described_class.config.port).to(eq("4567")) ENV["SIDEKIQ_ALIVE_PORT"] = nil end it "configures the concurrency from the SIDEKIQ_ALIVE_CONCURRENCY ENV var" do ENV["SIDEKIQ_ALIVE_CONCURRENCY"] = "3" SidekiqAlive.config.set_defaults expect(described_class.config.concurrency).to(eq(3)) ENV["SIDEKIQ_ALIVE_CONCURRENCY"] = nil end it "configurations behave as expected" do k = described_class.config expect(k.host).to(eq("0.0.0.0")) k.host = "1.2.3.4" expect(k.host).to(eq("1.2.3.4")) expect(k.port).to(eq(7433)) k.port = 4567 expect(k.port).to(eq(4567)) expect(k.liveness_key).to(eq("SIDEKIQ::LIVENESS_PROBE_TIMESTAMP")) k.liveness_key = "key" expect(k.liveness_key).to(eq("key")) expect(k.time_to_live).to(eq(10 * 60)) k.time_to_live = 2 * 60 expect(k.time_to_live).to(eq(2 * 60)) expect(k.callback.call).to(eq(nil)) k.callback = proc { "hello" } expect(k.callback.call).to(eq("hello")) expect(k.queue_prefix).to(eq(:"sidekiq-alive")) k.queue_prefix = :other expect(k.queue_prefix).to(eq(:other)) expect(k.shutdown_callback.call).to(eq(nil)) k.shutdown_callback = proc { "hello" } expect(k.shutdown_callback.call).to(eq("hello")) end end context "with redis" do let(:sidekiq_7) { SidekiqAlive::Helpers.sidekiq_7? } # Older versions of sidekiq yielded Sidekiq module as configuration object # With sidekiq > 7, configuration is a separate class let(:sq_config) { sidekiq_7 ? Sidekiq.default_configuration : Sidekiq } before do allow(Sidekiq).to(receive(:server?) 
{ true }) allow(sq_config).to(receive(:on)) if sidekiq_7 allow(sq_config).to(receive(:capsule).and_call_original) elsif sq_config.respond_to?(:[]) allow(sq_config).to(receive(:[]).and_call_original) else allow(sq_config).to(receive(:options).and_call_original) end end it '::store_alive_key" stores key with the expected ttl' do redis = SidekiqAlive.redis expect(redis.ttl(SidekiqAlive.current_lifeness_key)).to(eq(-2)) SidekiqAlive.store_alive_key expect(redis.ttl(SidekiqAlive.current_lifeness_key)).to(eq(SidekiqAlive.config.time_to_live)) end it "::current_lifeness_key" do expect(SidekiqAlive.current_lifeness_key).to(include("::test-hostname")) end it "::hostname" do expect(SidekiqAlive.hostname).to(eq("test-hostname")) end it "::alive?" do expect(SidekiqAlive.alive?).to(be(false)) SidekiqAlive.store_alive_key expect(SidekiqAlive.alive?).to(be(true)) end context "::start" do let(:server) { double("Server", quiet!: nil) } let(:queue_prefix) { :heathcheck } let(:queues) do next Sidekiq.default_configuration.capsules[SidekiqAlive::CAPSULE_NAME].queues if sidekiq_7 sq_config.options[:queues] end before do allow(SidekiqAlive::Server).to(receive(:run!) 
{ server }) allow(sq_config).to(receive(:on).with(:startup).and_yield) SidekiqAlive.instance_variable_set(:@redis, nil) end it "::registered_instances" do SidekiqAlive.start expect(SidekiqAlive.registered_instances.count).to(eq(1)) expect(SidekiqAlive.registered_instances.first).to(include("test-hostname")) end it "::on(:quiet)" do SidekiqAlive.start expect(sq_config).to(have_received(:on).with(:quiet)) do |&arg| arg.call expect(server).to(have_received(:quiet!)) end end it "::on(:shutdown)" do callback = double("callback", call: nil) SidekiqAlive.config.shutdown_callback = callback SidekiqAlive.start expect(sq_config).to(have_received(:on).with(:shutdown)) do |&arg| arg.call expect(SidekiqAlive.registered_instances.count).to(eq(0)) expect(callback).to(have_received(:call)) end end it "::queues" do SidekiqAlive.config.queue_prefix = queue_prefix SidekiqAlive.start expect(queues.first).to(eq("#{queue_prefix}-test-hostname")) end end end end ================================================ FILE: spec/spec_helper.rb ================================================ # frozen_string_literal: true require "simplecov" SimpleCov.start require "simplecov-cobertura" SimpleCov.formatter = SimpleCov::Formatter::CoberturaFormatter require "bundler/setup" require "sidekiq_alive" require "rspec-sidekiq" require "debug" require "rack" ENV["HOSTNAME"] = "test-hostname" Sidekiq.logger.level = Logger::FATAL RSpec.configure do |config| # Enable flags like --only-failures and --next-failure config.example_status_persistence_file_path = ".rspec_status" # Disable RSpec exposing methods globally on `Module` and `main` config.disable_monkey_patching! 
config.expect_with(:rspec) do |c| c.syntax = :expect end config.prepend_before do Sidekiq.redis(&:flushall) SidekiqAlive.config.set_defaults end end ================================================ FILE: spec/worker_spec.rb ================================================ # frozen_string_literal: true RSpec.describe(SidekiqAlive::Worker) do subject(:perform) do described_class.new.perform end context "When being executed in the same instance" do it "stores alive key and requeues it self" do SidekiqAlive.register_current_instance expect(described_class).to(receive(:perform_in)) n = 0 SidekiqAlive.config.callback = proc { n = 2 } perform expect(n).to(eq(2)) expect(SidekiqAlive.alive?).to(be(true)) end end context "custom liveness probe" do it "on error" do expect(described_class).not_to(receive(:perform_in)) n = 0 SidekiqAlive.config.custom_liveness_probe = proc do n = 2 raise "Nop" end begin perform rescue StandardError nil end expect(n).to(eq(2)) expect(SidekiqAlive.alive?).to(be(false)) end it "on success" do expect(described_class).to(receive(:perform_in)) n = 0 SidekiqAlive.config.custom_liveness_probe = proc { n = 2 } perform expect(n).to(eq(2)) expect(SidekiqAlive.alive?).to(be(true)) end end describe "orphaned queues removal" do it "removes orphaned queues" do queue = instance_double(Sidekiq::Queue, name: "notifications", latency: 10_000, size: 1, clear: nil) orphaning_queue = instance_double(Sidekiq::Queue, name: "sidekiq-alive-bar", latency: 200, size: 1, clear: nil) orphaned_queue = instance_double(Sidekiq::Queue, name: "sidekiq-alive-foo", latency: 350, size: 1, clear: nil) alive_job = instance_double(Sidekiq::JobRecord, klass: "SidekiqAlive::Worker") allow(orphaned_queue).to(receive(:all?).and_yield(alive_job)) imposter_queue = instance_double(Sidekiq::Queue, name: "sidekiq-aliveness", latency: 10_000, size: 1, clear: nil) job = instance_double(Sidekiq::JobRecord, klass: "AlivenessWorker") allow(imposter_queue).to(receive(:all?).and_yield(job)) 
module SidekiqAlive
  # Rake task helper that bumps the gem version stored in
  # lib/sidekiq_alive/version.rb and commits the change.
  #
  class VersionTask
    include Rake::DSL

    VERSION_FILE = "lib/sidekiq_alive/version.rb"

    def initialize
      add_version_task
    end

    # Add version bump task
    #
    def add_version_task
      desc("Bump application version [major, minor, patch]")
      task(:version, [:semver]) do |_task, args|
        # args[:semver] is one of "major"/"minor"/"patch" and dispatches to
        # the matching private bump method below
        new_version = send(args[:semver]).format("%M.%m.%p").to_s

        update_version(new_version)
        commit_and_tag(new_version)
      end
    end

    private

    # Update version file by substituting the current version string
    #
    # @param [String] new_version formatted "M.m.p" version string
    # @return [void]
    def update_version(new_version)
      u_version = File.read(VERSION_FILE).gsub(SidekiqAlive::VERSION, new_version)
      File.write(VERSION_FILE, u_version)
    end

    # Commit the updated version file.
    # NOTE(review): despite the method name, no git tag is created here and
    # Gemfile.lock is not committed — tagging is presumably performed later
    # by `rake release` in the release workflow; confirm before relying on it.
    #
    # @param [String] new_version
    # @return [void]
    def commit_and_tag(new_version)
      sh("git add #{VERSION_FILE}")
      sh("git commit -m 'Update version to #{new_version}'")
    end

    # Current gem version parsed as a SemVer (memoized; the bump methods
    # below mutate this shared instance, which is fine since only one bump
    # runs per rake invocation)
    #
    # @return [SemVer]
    def semver
      @semver ||= SemVer.parse(SidekiqAlive::VERSION)
    end

    # Increase patch version
    #
    # @return [SemVer]
    def patch
      semver.tap { |ver| ver.patch += 1 }
    end

    # Increase minor version
    #
    # @return [SemVer]
    def minor
      semver.tap do |ver|
        ver.minor += 1
        ver.patch = 0
      end
    end

    # Increase major version
    #
    # @return [SemVer]
    def major
      semver.tap do |ver|
        ver.major += 1
        ver.minor = 0
        ver.patch = 0
      end
    end
  end
end