Repository: square/kochiku Branch: master Commit: d9a2dd8f1d6c Files: 285 Total size: 831.4 KB Directory structure: gitextract_m1o94eo4/ ├── .gitignore ├── .haml-lint.yml ├── .rspec ├── .rubocop.yml ├── .rubocop_todo.yml ├── .ruby-version ├── .travis.yml ├── CONTRIBUTING.md ├── Capfile ├── Gemfile ├── LICENSE.txt ├── README.md ├── Rakefile ├── app/ │ ├── assets/ │ │ ├── javascripts/ │ │ │ └── application.js │ │ └── stylesheets/ │ │ └── screen.sass │ ├── controllers/ │ │ ├── application_controller.rb │ │ ├── branches_controller.rb │ │ ├── build_artifacts_controller.rb │ │ ├── build_attempts_controller.rb │ │ ├── build_parts_controller.rb │ │ ├── builds_controller.rb │ │ ├── concerns/ │ │ │ └── build_attempts_queue_position.rb │ │ ├── dashboards_controller.rb │ │ ├── pull_requests_controller.rb │ │ ├── repositories_controller.rb │ │ └── status_controller.rb │ ├── decorators/ │ │ ├── branch_decorator.rb │ │ └── build_part_decorator.rb │ ├── helpers/ │ │ ├── application_helper.rb │ │ ├── build_helper.rb │ │ ├── mail_helper.rb │ │ └── project_stats_helper.rb │ ├── jobs/ │ │ ├── build_attempt_job.rb │ │ ├── build_initiated_by_job.rb │ │ ├── build_partitioning_job.rb │ │ ├── build_state_update_job.rb │ │ ├── enforce_timeouts_job.rb │ │ ├── job_base.rb │ │ ├── poll_repositories_job.rb │ │ └── timeout_stuck_builds_job.rb │ ├── mailers/ │ │ ├── build_mailer.rb │ │ └── merge_mailer.rb │ ├── models/ │ │ ├── branch.rb │ │ ├── build.rb │ │ ├── build_artifact.rb │ │ ├── build_attempt.rb │ │ ├── build_part.rb │ │ ├── repository.rb │ │ └── repository_observer.rb │ ├── uploaders/ │ │ ├── base_log_file_uploader.rb │ │ ├── log_file_uploader.rb │ │ └── on_success_uploader.rb │ └── views/ │ ├── branches/ │ │ ├── health.html.haml │ │ ├── index.html.haml │ │ ├── show.html.haml │ │ ├── show.json.erb │ │ ├── show.rss.builder │ │ └── status_report.xml.builder │ ├── build_attempts/ │ │ ├── _build_attempt.html.haml │ │ └── stream_logs.html.haml │ ├── build_mailer/ │ │ ├── 
build_break_email.html.haml │ │ ├── build_break_email.text.erb │ │ ├── build_success_email.html.haml │ │ ├── build_success_email.text.erb │ │ ├── error_email.html.haml │ │ └── error_email.text.erb │ ├── build_parts/ │ │ ├── _build_attempts.html.haml │ │ ├── _build_part.html.haml │ │ └── show.html.haml │ ├── builds/ │ │ ├── _build.html.haml │ │ ├── _build_parts.html.haml │ │ └── show.html.haml │ ├── dashboards/ │ │ └── build_history_by_worker.html.haml │ ├── layouts/ │ │ └── application.html.haml │ ├── merge_mailer/ │ │ ├── merge_failed.text.erb │ │ └── merge_successful.html.erb │ └── repositories/ │ ├── _form.html.haml │ ├── dashboard.html.haml │ ├── edit.html.haml │ ├── index.html.haml │ └── new.html.haml ├── bin/ │ ├── bundle │ ├── rails │ ├── rake │ ├── setup │ └── spring ├── config/ │ ├── application.dev.yml │ ├── application.rb │ ├── application.test.yml │ ├── application.yml │ ├── boot.rb │ ├── compass.rb │ ├── database.production.yml.sample │ ├── database.yml │ ├── deploy/ │ │ └── production.rb │ ├── deploy.rb │ ├── environment.rb │ ├── environments/ │ │ ├── development.rb │ │ ├── production.rb │ │ ├── staging.rb │ │ └── test.rb │ ├── initializers/ │ │ ├── backtrace_silencers.rb │ │ ├── cocaine.rb │ │ ├── inflections.rb │ │ ├── load_build_strategy.rb │ │ ├── mime_types.rb │ │ ├── readthis.rb │ │ ├── redis.rb │ │ ├── resque.rb │ │ ├── secret_token.rb │ │ ├── session_store.rb │ │ └── wrap_parameters.rb │ ├── kochiku.yml │ ├── kochiku.yml.sample │ ├── locales/ │ │ └── en.yml │ ├── resque_schedule.yml │ └── routes.rb ├── config.ru ├── db/ │ ├── migrate/ │ │ ├── 20110621212000_create_schema.rb │ │ ├── 20110624003418_change_artifact_type_to_name.rb │ │ ├── 20110624015709_rename_build_part_result_result_to_state.rb │ │ ├── 20110708203120_change_build_artifacts_for_carrier_wave.rb │ │ ├── 20110713175724_rename_build_part_result_to_build_part_run.rb │ │ ├── 20110713191536_add_foreign_key_indexes.rb │ │ ├── 20110719204508_create_projects.rb │ │ ├── 
20110719205413_add_project_id_to_builds.rb │ │ ├── 20110721185201_rename_builds_sha_to_ref.rb │ │ ├── 20110801215540_rename_error_state_to_errored.rb │ │ ├── 20120803005242_add_merge_bool_to_build.rb │ │ ├── 20120817225343_add_branch_to_build.rb │ │ ├── 20121008211955_create_repositories.rb │ │ ├── 20121017173936_add_github_repository_id_to_repository.rb │ │ ├── 20121017182543_fix_repository_schema.rb │ │ ├── 20121017184946_remove_options_from_repository.rb │ │ ├── 20121017222538_add_target_name_to_builds.rb │ │ ├── 20121017224003_add_command_flag_to_repositories.rb │ │ ├── 20121018182435_add_options_to_build_part.rb │ │ ├── 20121024005715_add_send_build_failure_email_to_repository.rb │ │ ├── 20121024164929_record_build_failure_email_sent.rb │ │ ├── 20121024210129_add_success_script_to_repositories.rb │ │ ├── 20121024212949_add_on_success_log_file_to_build.rb │ │ ├── 20121030213442_add_queue_to_repository.rb │ │ ├── 20121101220831_add_timeout_to_repository.rb │ │ ├── 20130226232844_add_index_to_build_ref.rb │ │ ├── 20130409144945_add_on_success_note_to_repositories.rb │ │ ├── 20130511012855_add_deployable_map_to_build.rb │ │ ├── 20130626183046_add_maven_modules_to_build.rb │ │ ├── 20130627194433_add_index_to_build_part_paths.rb │ │ ├── 20130709123456_add_upload_artifacts_to_build_parts.rb │ │ ├── 20130822191419_add_queue_to_build_part.rb │ │ ├── 20130822231850_remove_upload_artifacts_from_build_parts.rb │ │ ├── 20130823210844_add_retry_count_to_build_part.rb │ │ ├── 20130823231854_remove_java_specific_stuff.rb │ │ ├── 20130823234546_remove_queue_override_from_repositories.rb │ │ ├── 20130910190203_add_repository_name_as_column.rb │ │ ├── 20131217022000_add_error_text_to_build.rb │ │ ├── 20140123234208_add_allows_kochiku_merges_to_repository.rb │ │ ├── 20140128180258_rename_auto_merge_on_build.rb │ │ ├── 20140415001051_remove_use_branches_on_green_from_repositories.rb │ │ ├── 20140415011144_remove_command_flag_from_repositories.rb │ │ ├── 
20140506012721_unique_index_on_builds_ref.rb │ │ ├── 20140507184819_add_host_and_namespace_to_repositories.rb │ │ ├── 20140617214701_add_success_email.rb │ │ ├── 20140715225910_remove_notes.rb │ │ ├── 20141031234747_add_email_first_failure_to_repositories.rb │ │ ├── 20150324001246_remove_on_success_script_from_repositories.rb │ │ ├── 20150331160909_add_send_merge_successful_email.rb │ │ ├── 20150714234635_add_log_port_to_build_attempt.rb │ │ ├── 20150717214656_create_branches.rb │ │ ├── 20150717220149_assign_builds_to_branches.rb │ │ ├── 20150717231250_remove_branch_string_from_builds.rb │ │ ├── 20150719130110_index_repositories_namespace_and_name.rb │ │ ├── 20151111080255_remove_repo_cache_dir_from_repositories.rb │ │ ├── 20151114185514_fix_convergence_index.rb │ │ ├── 20160408214135_index_created_at_on_build_attempts.rb │ │ ├── 20170804214538_add_enabled_bool_to_repositories.rb │ │ ├── 20180208202524_add_test_command_to_builds.rb │ │ ├── 20180220185338_add_assume_lost_after_to_repository.rb │ │ ├── 20180227222254_add_initiated_by_to_builds.rb │ │ ├── 20180301221320_add_instance_type_to_build_attempts.rb │ │ └── 20180619210823_add_kochiku_yml_config_to_builds.rb │ ├── schema.rb │ └── seeds.rb ├── lib/ │ ├── build_strategies/ │ │ ├── no_op_build_strategy.rb │ │ └── production_build_strategy.rb │ ├── capistrano/ │ │ └── tasks/ │ │ ├── deploy.cap │ │ └── kochiku.cap │ ├── fileless_io.rb │ ├── git_blame.rb │ ├── git_merge_executor.rb │ ├── git_repo.rb │ ├── github_commit_status.rb │ ├── github_post_receive_hook.rb │ ├── github_request.rb │ ├── partitioner/ │ │ ├── base.rb │ │ ├── default.rb │ │ ├── dependency_map.rb │ │ ├── go.rb │ │ ├── maven.rb │ │ └── topological_sorter.rb │ ├── partitioner.rb │ ├── remote_server/ │ │ ├── github.rb │ │ └── stash.rb │ ├── remote_server.rb │ ├── server_settings.rb │ ├── settings_accessor.rb │ ├── stash_merge_executor.rb │ └── tasks/ │ ├── .gitkeep │ ├── kochiku.rake │ └── resque.rake ├── public/ │ ├── 404.html │ ├── 422.html │ ├── 
500.html │ ├── fonts/ │ │ └── SQMarket-Regular.otf │ └── robots.txt ├── script/ │ ├── ci │ └── kochiku-build.sh.sample ├── spec/ │ ├── controllers/ │ │ ├── branches_controller_spec.rb │ │ ├── build_artifacts_controller_spec.rb │ │ ├── build_attempts_controller_spec.rb │ │ ├── build_parts_controller_spec.rb │ │ ├── builds_controller_spec.rb │ │ ├── dashboards_controller_spec.rb │ │ ├── pull_requests_controller_spec.rb │ │ ├── repositories_controller_spec.rb │ │ └── status_controller_spec.rb │ ├── decorators/ │ │ ├── branch_decorator_spec.rb │ │ └── build_part_decorator_spec.rb │ ├── features/ │ │ └── integration_spec.rb │ ├── fixtures/ │ │ ├── build_artifact.log │ │ ├── sample_github_webhook_payload.json │ │ └── stdout.log │ ├── helpers/ │ │ ├── application_helper_spec.rb │ │ ├── build_helper_spec.rb │ │ └── project_stats_helper_spec.rb │ ├── jobs/ │ │ ├── build_partitioning_job_spec.rb │ │ ├── build_state_update_job_spec.rb │ │ ├── enforce_timeouts_job_spec.rb │ │ ├── poll_repositories_job_spec.rb │ │ └── timeout_stuck_builds_job_spec.rb │ ├── lib/ │ │ ├── build_strategies/ │ │ │ └── production_build_strategy_spec.rb │ │ ├── git_blame_spec.rb │ │ ├── git_merge_executor_spec.rb │ │ ├── git_repo_spec.rb │ │ ├── github_commit_status_spec.rb │ │ ├── github_post_receive_hook_spec.rb │ │ ├── github_request_spec.rb │ │ ├── partitioner/ │ │ │ ├── default_spec.rb │ │ │ ├── dependency_map_spec.rb │ │ │ ├── go_spec.rb │ │ │ ├── maven_spec.rb │ │ │ └── shared_default_behavior.rb │ │ ├── partitioner_spec.rb │ │ ├── remote_server/ │ │ │ ├── github_spec.rb │ │ │ └── stash_spec.rb │ │ ├── remote_server_spec.rb │ │ ├── server_settings_spec.rb │ │ ├── settings_accessor_spec.rb │ │ └── stash_merge_executor_spec.rb │ ├── mailers/ │ │ ├── build_mailer_spec.rb │ │ ├── merge_mailer_spec.rb │ │ └── previews/ │ │ └── build_mailer_preview.rb │ ├── models/ │ │ ├── branch_spec.rb │ │ ├── build_artifact_spec.rb │ │ ├── build_attempt_spec.rb │ │ ├── build_part_spec.rb │ │ ├── build_spec.rb │ │ 
├── repository_observer_spec.rb │ │ └── repository_spec.rb │ ├── routes_spec.rb │ ├── spec_helper.rb │ └── support/ │ ├── command_stubber.rb │ ├── custom_argument_matchers.rb │ ├── factories.rb │ ├── git_spec_helper.rb │ └── sha_helper.rb └── vendor/ └── assets/ ├── javascripts/ │ ├── jquery.flot.categories.js │ ├── jquery.flot.errorbars.js │ ├── jquery.flot.js │ ├── jquery.tablesorter.js │ ├── jquery.timeago.js │ ├── jquery.tipTip.js │ └── moment.js └── stylesheets/ ├── tablesorter.theme.kochiku.css └── tipTip.scss ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitignore ================================================ /.bundle log/*.log tmp/ public/uploads public/log_files vendor/ruby # file with the real db config for production /config/database.production.yml # rspec /spec/examples.txt ================================================ FILE: .haml-lint.yml ================================================ linters: ConsecutiveSilentScripts: enabled: false IdNames: enabled: false InlineStyles: enabled: false InstanceVariables: enabled: false ImplicitDiv: enabled: false LineLength: enabled: false RuboCop: enabled: false SpaceInsideHashAttributes: style: no_space ViewLength: enabled: false ================================================ FILE: .rspec ================================================ --color ================================================ FILE: .rubocop.yml ================================================ inherit_from: .rubocop_todo.yml AllCops: TargetRubyVersion: 2.3 TargetRailsVersion: 4.2 DisplayCopNames: true Include: - 'Capfile' - 'Gemfile' - 'Rakefile' - 'config.ru' Exclude: - 'config/deploy.rb' - 'db/migrate/*' - 'db/schema.rb' - 'lib/ext/activerecord/persistence.rb' - 'vendor/**/*' Layout/EmptyLinesAroundBlockBody: Enabled: false Layout/EmptyLinesAroundClassBody: Enabled: false Layout/ExtraSpacing: Enabled: false 
Layout/MultilineBlockLayout: Exclude: - 'spec/lib/partitioner/maven_spec.rb' Lint/HandleExceptions: Exclude: - 'lib/git_repo.rb' Lint/NestedMethodDefinition: Exclude: - 'app/jobs/enforce_timeouts_job.rb' Lint/UselessAssignment: Exclude: - 'config/compass.rb' - 'spec/models/build_spec.rb' Metrics/MethodLength: Exclude: - 'lib/partitioner/pants.rb' Metrics/ClassLength: Enabled: false Rails: Enabled: true Style/BlockDelimiters: Enabled: false Style/DoubleNegation: Enabled: false Style/FrozenStringLiteralComment: # will enable this after Ruby 3.0 is released Enabled: false Style/GuardClause: # Personally disagree with this one. In certain situations not using a # GuardClause makes it more readable. Enabled: false Style/IfUnlessModifier: Enabled: false Style/MultilineBlockChain: Enabled: false Style/TrailingCommaInLiteral: Enabled: false ================================================ FILE: .rubocop_todo.yml ================================================ # This configuration was generated by # `rubocop --auto-gen-config` # on 2017-12-08 16:49:35 -0800 using RuboCop version 0.51.0. # The point is for the user to remove these configuration records # one by one as the offenses are removed from the code base. # Note that changes in the inspected code, or installation of new # versions of RuboCop, may require this file to be generated again. # Offense count: 3 # Cop supports --auto-correct. Layout/EmptyLineAfterMagicComment: Exclude: - 'lib/github_post_receive_hook.rb' - 'lib/partitioner/maven.rb' - 'spec/features/integration_spec.rb' # Offense count: 2 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles, IndentationWidth. # SupportedStyles: special_inside_parentheses, consistent, align_braces Layout/IndentHash: Exclude: - 'app/views/branches/status_report.xml.builder' # Offense count: 8 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles. 
# SupportedStyles: auto_detection, squiggly, active_support, powerpack, unindent Layout/IndentHeredoc: Exclude: - 'spec/features/integration_spec.rb' - 'spec/lib/git_blame_spec.rb' - 'spec/lib/partitioner/maven_spec.rb' # Offense count: 11 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles. # SupportedStyles: symmetrical, new_line, same_line Layout/MultilineMethodCallBraceLayout: Exclude: - 'app/controllers/build_attempts_controller.rb' - 'spec/controllers/pull_requests_controller_spec.rb' - 'spec/jobs/build_partitioning_job_spec.rb' - 'spec/jobs/build_state_update_job_spec.rb' - 'spec/lib/github_commit_status_spec.rb' # Offense count: 11 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles, IndentationWidth. # SupportedStyles: aligned, indented, indented_relative_to_receiver Layout/MultilineMethodCallIndentation: Exclude: - 'spec/lib/build_strategies/production_build_strategy_spec.rb' - 'spec/lib/git_merge_executor_spec.rb' - 'spec/lib/partitioner/maven_spec.rb' # Offense count: 10 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles, SupportedStylesForEmptyBraces. # SupportedStyles: space, no_space # SupportedStylesForEmptyBraces: space, no_space Layout/SpaceBeforeBlockBraces: Exclude: - 'config/application.rb' - 'spec/controllers/branches_controller_spec.rb' - 'spec/controllers/build_artifacts_controller_spec.rb' - 'spec/controllers/repositories_controller_spec.rb' - 'spec/jobs/poll_repositories_job_spec.rb' - 'spec/lib/remote_server_spec.rb' - 'spec/models/build_spec.rb' # Offense count: 245 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles, EnforcedStyleForEmptyBraces, SupportedStylesForEmptyBraces. # SupportedStyles: space, no_space, compact # SupportedStylesForEmptyBraces: space, no_space Layout/SpaceInsideHashLiteralBraces: Enabled: false # Offense count: 2 # Cop supports --auto-correct. 
Layout/SpaceInsidePercentLiteralDelimiters: Exclude: - 'spec/models/repository_spec.rb' # Offense count: 5 Lint/AmbiguousBlockAssociation: Exclude: - 'spec/controllers/branches_controller_spec.rb' - 'spec/controllers/build_parts_controller_spec.rb' - 'spec/jobs/build_state_update_job_spec.rb' - 'spec/jobs/enforce_timeouts_job_spec.rb' - 'spec/jobs/poll_repositories_job_spec.rb' # Offense count: 1 # Configuration parameters: AllowSafeAssignment. Lint/AssignmentInCondition: Exclude: - 'app/decorators/branch_decorator.rb' # Offense count: 3 Lint/RescueWithoutErrorClass: Exclude: - 'app/controllers/build_attempts_controller.rb' - 'app/jobs/job_base.rb' - 'lib/remote_server/stash.rb' # Offense count: 6 # Cop supports --auto-correct. # Configuration parameters: IgnoreEmptyBlocks, AllowUnusedKeywordArguments. Lint/UnusedBlockArgument: Exclude: - 'app/controllers/application_controller.rb' - 'spec/lib/git_blame_spec.rb' - 'spec/models/build_part_spec.rb' - 'spec/support/custom_argument_matchers.rb' - 'spec/support/factories.rb' # Offense count: 10 # Cop supports --auto-correct. # Configuration parameters: AllowUnusedKeywordArguments, IgnoreEmptyMethods. Lint/UnusedMethodArgument: Exclude: - 'app/decorators/branch_decorator.rb' - 'app/helpers/build_helper.rb' - 'app/jobs/build_attempt_job.rb' - 'lib/partitioner/default.rb' - 'lib/remote_server/stash.rb' - 'spec/mailers/previews/build_mailer_preview.rb' # Offense count: 48 Metrics/AbcSize: Max: 58 # Offense count: 128 # Configuration parameters: CountComments, ExcludedMethods. Metrics/BlockLength: Max: 550 # Offense count: 7 # Configuration parameters: CountComments. Metrics/ClassLength: Max: 232 # Offense count: 8 Metrics/CyclomaticComplexity: Max: 17 # Offense count: 1110 # Configuration parameters: AllowHeredoc, AllowURI, URISchemes, IgnoreCopDirectives, IgnoredPatterns. # URISchemes: http, https Metrics/LineLength: Max: 562 # Offense count: 58 # Configuration parameters: CountComments. 
Metrics/MethodLength: Max: 46 # Offense count: 7 Metrics/PerceivedComplexity: Max: 19 # Offense count: 1 # Configuration parameters: Blacklist. # Blacklist: END, (?-mix:EO[A-Z]{1}) Naming/HeredocDelimiterNaming: Exclude: - 'app/models/build.rb' # Offense count: 2 # Configuration parameters: NamePrefix, NamePrefixBlacklist, NameWhitelist, MethodDefinitionMacros. # NamePrefix: is_, has_, have_ # NamePrefixBlacklist: is_, has_, have_ # NameWhitelist: is_a? # MethodDefinitionMacros: define_method, define_singleton_method Naming/PredicateName: Exclude: - 'spec/**/*' - 'app/helpers/build_helper.rb' - 'app/models/build.rb' # Offense count: 10 # Configuration parameters: EnforcedStyle, SupportedStyles. # SupportedStyles: snake_case, camelCase Naming/VariableName: Exclude: - 'lib/remote_server/stash.rb' - 'spec/controllers/repositories_controller_spec.rb' - 'spec/models/repository_spec.rb' # Offense count: 18 # Configuration parameters: EnforcedStyle, SupportedStyles. # SupportedStyles: snake_case, normalcase, non_integer Naming/VariableNumber: Exclude: - 'spec/controllers/builds_controller_spec.rb' - 'spec/models/build_spec.rb' # Offense count: 3 # Cop supports --auto-correct. Performance/RegexpMatch: Exclude: - 'lib/git_blame.rb' - 'lib/remote_server/github.rb' - 'lib/remote_server/stash.rb' # Offense count: 4 # Cop supports --auto-correct. # Configuration parameters: NilOrEmpty, NotPresent, UnlessPresent. Rails/Blank: Exclude: - 'app/jobs/build_state_update_job.rb' - 'app/models/repository.rb' - 'lib/git_blame.rb' # Offense count: 1 # Cop supports --auto-correct. # Configuration parameters: Whitelist. 
# Whitelist: find_by_sql Rails/DynamicFindBy: Exclude: - 'spec/models/build_spec.rb' # Offense count: 4 Rails/FilePath: Exclude: - 'app/controllers/status_controller.rb' - 'db/seeds.rb' - 'spec/lib/build_strategies/production_build_strategy_spec.rb' - 'spec/spec_helper.rb' # Offense count: 2 Rails/OutputSafety: Exclude: - 'app/helpers/build_helper.rb' - 'app/helpers/mail_helper.rb' # Offense count: 18 # Configuration parameters: Blacklist. # Blacklist: decrement!, decrement_counter, increment!, increment_counter, toggle!, touch, update_all, update_attribute, update_column, update_columns, update_counters Rails/SkipsModelValidations: Exclude: - 'app/models/build.rb' - 'app/models/build_attempt.rb' - 'app/models/build_part.rb' - 'spec/controllers/pull_requests_controller_spec.rb' - 'spec/features/integration_spec.rb' - 'spec/helpers/project_stats_helper_spec.rb' - 'spec/mailers/build_mailer_spec.rb' - 'spec/models/build_part_spec.rb' - 'spec/models/build_spec.rb' # Offense count: 2 # Configuration parameters: Environments. # Environments: development, test, production Rails/UnknownEnv: Exclude: - 'app/models/repository_observer.rb' - 'config/initializers/readthis.rb' # Offense count: 2 # Cop supports --auto-correct. Security/YAMLLoad: Exclude: - 'lib/git_repo.rb' - 'lib/settings_accessor.rb' # Offense count: 92 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles. 
# SupportedStyles: braces, no_braces, context_dependent Style/BracesAroundHashParameters: Exclude: - 'app/views/branches/show.rss.builder' - 'app/views/branches/status_report.xml.builder' - 'db/seeds.rb' - 'lib/git_repo.rb' - 'lib/remote_server/stash.rb' - 'spec/controllers/pull_requests_controller_spec.rb' - 'spec/lib/git_blame_spec.rb' - 'spec/lib/partitioner/maven_spec.rb' - 'spec/lib/partitioner/shared_default_behavior.rb' - 'spec/mailers/build_mailer_spec.rb' - 'spec/models/build_part_spec.rb' - 'spec/models/build_spec.rb' # Offense count: 1 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles, SingleLineConditionsOnly, IncludeTernaryExpressions. # SupportedStyles: assign_to_condition, assign_inside_condition Style/ConditionalAssignment: Exclude: - 'app/helpers/build_helper.rb' # Offense count: 52 Style/Documentation: Enabled: false # Offense count: 1 # Cop supports --auto-correct. Style/EmptyCaseCondition: Exclude: - 'app/models/build.rb' # Offense count: 4 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles. # SupportedStyles: compact, expanded Style/EmptyMethod: Exclude: - 'app/jobs/build_attempt_job.rb' - 'lib/build_strategies/no_op_build_strategy.rb' # Offense count: 1 # Cop supports --auto-correct. Style/Encoding: Exclude: - 'spec/features/integration_spec.rb' # Offense count: 1 # Configuration parameters: SupportedStyles. # SupportedStyles: annotated, template Style/FormatStringToken: EnforcedStyle: template # Offense count: 738 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles, UseHashRocketsWithSymbolValues, PreferHashRocketsForNonAlnumEndingSymbols. # SupportedStyles: ruby19, hash_rockets, no_mixed_keys, ruby19_no_mixed_keys Style/HashSyntax: Enabled: false # Offense count: 1 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles. 
# SupportedStyles: keyword, braces Style/MultilineMemoization: Exclude: - 'lib/partitioner/dependency_map.rb' # Offense count: 2 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, MinBodyLength, SupportedStyles. # SupportedStyles: skip_modifier_ifs, always Style/Next: Exclude: - 'app/jobs/enforce_timeouts_job.rb' - 'lib/tasks/kochiku.rake' # Offense count: 16 # Cop supports --auto-correct. # Configuration parameters: Strict. Style/NumericLiterals: MinDigits: 11 # Offense count: 6 # Cop supports --auto-correct. # Configuration parameters: AutoCorrect, EnforcedStyle, SupportedStyles. # SupportedStyles: predicate, comparison Style/NumericPredicate: Exclude: - 'spec/**/*' - 'app/models/build.rb' - 'lib/git_repo.rb' - 'lib/partitioner/dependency_map.rb' # Offense count: 1 # Cop supports --auto-correct. Style/ParallelAssignment: Exclude: - 'app/helpers/project_stats_helper.rb' # Offense count: 68 # Cop supports --auto-correct. # Configuration parameters: PreferredDelimiters. Style/PercentLiteralDelimiters: Exclude: - 'config/deploy/production.rb' - 'db/seeds.rb' - 'lib/partitioner/dependency_map.rb' - 'spec/controllers/branches_controller_spec.rb' - 'spec/helpers/application_helper_spec.rb' - 'spec/jobs/build_state_update_job_spec.rb' - 'spec/lib/github_commit_status_spec.rb' - 'spec/lib/partitioner/dependency_map_spec.rb' - 'spec/lib/partitioner/maven_spec.rb' - 'spec/lib/partitioner/shared_default_behavior.rb' - 'spec/lib/settings_accessor_spec.rb' - 'spec/models/build_spec.rb' - 'spec/models/repository_spec.rb' # Offense count: 3 # Cop supports --auto-correct. Style/Proc: Exclude: - 'app/mailers/build_mailer.rb' - 'app/mailers/merge_mailer.rb' - 'config/application.rb' # Offense count: 2 # Cop supports --auto-correct. Style/RedundantBegin: Exclude: - 'lib/git_merge_executor.rb' - 'lib/github_post_receive_hook.rb' # Offense count: 4 # Cop supports --auto-correct. # Configuration parameters: AllowMultipleReturnValues. 
Style/RedundantReturn: Exclude: - 'app/models/branch.rb' - 'lib/partitioner/default.rb' - 'lib/remote_server/stash.rb' # Offense count: 12 # Cop supports --auto-correct. Style/RedundantSelf: Exclude: - 'app/models/branch.rb' - 'app/models/build.rb' - 'app/models/repository.rb' # Offense count: 4 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles, AllowInnerSlashes. # SupportedStyles: slashes, percent_r, mixed Style/RegexpLiteral: Exclude: - 'app/controllers/repositories_controller.rb' - 'app/helpers/build_helper.rb' - 'config/routes.rb' - 'spec/models/build_spec.rb' # Offense count: 5 # Cop supports --auto-correct. # Configuration parameters: ConvertCodeThatCanStartToReturnNil. Style/SafeNavigation: Exclude: - 'app/models/build.rb' - 'app/models/build_attempt.rb' - 'app/models/build_part.rb' - 'lib/settings_accessor.rb' # Offense count: 2 # Cop supports --auto-correct. Style/SelfAssignment: Exclude: - 'lib/git_blame.rb' - 'lib/partitioner/default.rb' # Offense count: 1906 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles, ConsistentQuotesInMultiline. # SupportedStyles: single_quotes, double_quotes Style/StringLiterals: Enabled: false # Offense count: 2 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles. # SupportedStyles: single_quotes, double_quotes Style/StringLiteralsInInterpolation: Exclude: - 'lib/remote_server/stash.rb' # Offense count: 14 # Cop supports --auto-correct. # Configuration parameters: MinSize, SupportedStyles. # SupportedStyles: percent, brackets Style/SymbolArray: EnforcedStyle: brackets # Offense count: 6 # Cop supports --auto-correct. # Configuration parameters: IgnoredMethods. 
# IgnoredMethods: respond_to, define_method Style/SymbolProc: Exclude: - 'app/controllers/dashboards_controller.rb' - 'app/helpers/project_stats_helper.rb' - 'app/models/branch.rb' - 'app/models/build.rb' - 'db/seeds.rb' - 'lib/partitioner/maven.rb' # Offense count: 2 # Cop supports --auto-correct. # Configuration parameters: EnforcedStyle, SupportedStyles, AllowSafeAssignment. # SupportedStyles: require_parentheses, require_no_parentheses, require_parentheses_when_complex Style/TernaryParentheses: Exclude: - 'app/models/build.rb' # Offense count: 1 # Cop supports --auto-correct. # Configuration parameters: AllowNamedUnderscoreVariables. Style/TrailingUnderscoreVariable: Exclude: - 'app/mailers/build_mailer.rb' # Offense count: 18 # Cop supports --auto-correct. # Configuration parameters: SupportedStyles, WordRegex. # SupportedStyles: percent, brackets Style/WordArray: EnforcedStyle: percent MinSize: 3 ================================================ FILE: .ruby-version ================================================ 2.4.3 ================================================ FILE: .travis.yml ================================================ sudo: false cache: bundler language: ruby rvm: - 2.4.4 - 2.5.1 before_install: # some ruby versions come with a broken version of rubygems, update to # consistent version - gem update --system 2.7.6 - gem install bundler -v '>= 1.16.1' script: - RAILS_ENV=test bundle exec rake --trace db:create db:migrate - bundle exec rspec - bundle exec rubocop - bundle exec haml-lint app/views/ ================================================ FILE: CONTRIBUTING.md ================================================ Contributing ============ If you would like to contribute code to Kochiku, thank you! You can do so through GitHub by forking the repository and sending a pull request. However, before your code can be accepted into the project we need you to sign Square's (super simple) [Individual Contributor License Agreement (CLA)][1]. 
[1]: https://spreadsheets.google.com/spreadsheet/viewform?formkey=dDViT2xzUHAwRkI3X3k5Z0lQM091OGc6MQ&ndplr=1 ================================================ FILE: Capfile ================================================ # Load DSL and Setup Up Stages require 'capistrano/setup' # Includes default deployment tasks require 'capistrano/deploy' # Includes tasks from other gems included in your Gemfile require 'capistrano/bundler' require 'capistrano/rails' # If you would like to use a Ruby version manager with kochiku # require it from a .cap file in lib/capistrano/tasks/. # # For more information see: # http://capistranorb.com/documentation/frameworks/rbenv-rvm-chruby/ # Loads custom tasks from `lib/capistrano/tasks' if you have any defined. Dir.glob('lib/capistrano/tasks/*.cap').sort.each { |r| import r } ================================================ FILE: Gemfile ================================================ source 'https://rubygems.org' gem 'actionpack-action_caching', '> 1.1.1' gem 'activemodel-serializers-xml' # required for xml serialization gem 'rails-controller-testing' # Included to provide 'assigns' method gem 'dynamic_form' gem 'passenger', '~> 4.0.41', group: :production gem 'rails', '~> 5.1.0' gem 'rails-observers' gem 'carrierwave' gem 'draper', '~> 3.0.1' gem 'mysql2', '>= 0.4.4' gem 'compass-rails' gem 'haml-rails' gem 'jquery-rails' gem 'sass-rails' gem 'uglifier' # therubyracer is a JS runtime required by execjs, which is in turn required # by uglifier. therubyracer is not the fastest option but it is the most portable. 
gem 'therubyracer' gem 'hiredis' # better Redis performance for usage as cache gem 'readthis' gem 'redis', require: ["redis", "redis/connection/hiredis"] gem 'resque', '~> 1.27.4' gem 'resque-retry' gem 'resque-scheduler', require: false gem 'json' # used by resque gem 'awesome_print', require: false gem 'chunky_png' gem 'cocaine' gem 'nokogiri', '~> 1.8', '>= 1.8.4' # 1.8.3 and below have known vulnerabilities gem 'posix-spawn' # used by cocaine gem 'pry-byebug' gem 'pry-rails' group :test, :development do gem 'factory_bot_rails' gem 'haml_lint', require: false gem 'rspec-collection_matchers' gem 'rspec-rails', '~> 3.0' gem 'rubocop', require: false end group :development do gem 'bullet' gem 'capistrano', '~> 3.0', require: false gem 'capistrano-bundler', '~> 1.1', require: false gem 'capistrano-rails', '~> 1.1', require: false gem 'capistrano-rvm', '~> 0.1', require: false gem 'rails-erd' gem 'thin' end group :test do gem 'capybara', '~> 2.3' gem 'fakeredis', :require => "fakeredis/rspec" gem 'webmock', require: false end ================================================ FILE: LICENSE.txt ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: README.md ================================================ Kochiku - Distributed tests made easy ===================================== Kochiku is a distributed platform for test automation. It has three main components: - A **web server**, which lets you inspect builds and manage repositories - **Background jobs** that divide builds into distributable parts - **Workers** that run individual parts of a build A single machine typically runs the web server and background jobs, whereas many machines run workers. Use Kochiku to distribute large test suites quickly and easily. It's language agnostic; Use it for Ruby, Rails, Node.js, Ember, Java, C, C++ or anything else that runs in a unix environment. ### Git integration Kochiku currently integrates with git repositories stored in Github (including Github Enterprise) or Atlassian Bitbucket (formerly known as Stash). This lets Kochiku automatically run test suites for pull requests and commits to the master branch. Kochiku can also build any git revision on request. Support for headless git servers is coming soon. 
## User Guide - [Installation & Deployment](https://github.com/square/kochiku/wiki/Installation-&-Deployment) - [Adding a repository](https://github.com/square/kochiku/wiki/How-to-add-a-repository-to-Kochiku) - [Initiating a build](https://github.com/square/kochiku/wiki/How-to-initiate-a-build-on-Kochiku) - [Hacking on Kochiku](https://github.com/square/kochiku/wiki/Hacking-on-Kochiku) - [Changelog](https://github.com/square/kochiku/wiki/CHANGELOG) - [Additional documentation](https://github.com/square/kochiku/wiki/_pages) ================================================ FILE: Rakefile ================================================ # Add your own tasks in files placed in lib/tasks ending in .rake, # for example lib/tasks/capistrano.rake, and they will automatically be available to Rake. require File.expand_path('../config/application', __FILE__) require 'rake' Kochiku::Application.load_tasks ================================================ FILE: app/assets/javascripts/application.js ================================================ //= require jquery //= require jquery_ujs //= require jquery.tipTip //= require jquery.tablesorter //= require jquery.timeago //= require jquery.flot //= require jquery.flot.errorbars //= require jquery.flot.categories //= require moment //= require_self moment.lang('en', { calendar: { sameDay: 'h:mma', lastDay: 'ddd ha', lastWeek: 'ddd', sameElse: 'M/D' } }); Kochiku = {}; StartTimes = {}; Kochiku.delayedRefresh = function(updateInfo) { var now = new Date(); $(updateInfo.table).find('tr:has(.running)').each( function() { var startTime = new Date(Date.parse(StartTimes[$(this).data('id')])); $(this).find('.elapsed').text( Math.floor((now-startTime)/60000) + ":" + ("00" + (Math.round((now-startTime)/1000)%60)).slice(-2)); }); setTimeout(function() { if($('input#refresh').is(':checked')) { $.getJSON(document.URL + '/modified_time', function( data ) { var buildTime = Date.parse(data); var renderTime = updateInfo.renderTime; if(buildTime > 
renderTime) { Kochiku.buildInfo.renderTime = buildTime; //keep the updated at display up to date var timeAgo = new Date(renderTime).toISOString(); var updateDisplay = $("#time-since-update"); updateDisplay.timeago("update", timeAgo); updateDisplay.prop("title", timeAgo); //ajax in changed parts Kochiku.updateBuildParts(renderTime); } else if ($(".queue-position-value").length > 0) { // if there are displayed queue positions refresh Kochiku.updateBuildParts(renderTime); } }); Kochiku.delayedRefresh(Kochiku.buildInfo); } }, 5000); }; jQuery(document).ready(function() { jQuery("abbr.timeago").timeago(); }); Kochiku.updateBuildParts = function(renderTime) { $.getJSON(document.URL + '/refresh_build_part_info', { modified_time: renderTime }, function( data ) { $.each(data,function(index, el) { if (el.state != Kochiku.buildInfo.state) { if ( $.inArray(el.state, Kochiku.terminalStates) != -1) { switch(el.state) { case "succeeded": case "passed": status = "✅ " + el.state; break; case "failed": status = "🚫 " + el.state; break; default: status = el.state; } Kochiku.notify(status + " " + Kochiku.doneMessage + Kochiku.buildInfo.repo + "/" + Kochiku.buildInfo.branch); } window.location.reload(); } var row; row = $(Kochiku.buildInfo.table).find("[data-id='" + el.id + "']"); if (row) { row.replaceWith(el.content); } }); //reload the table after its updated $("table.tablesorter").trigger("update", [true]); }); } Kochiku.notify = function(message) { options = {body: message} if ( "Notification" in window && Notification.permission !== "denied") { Notification.requestPermission(function (permission) { if (permission === "granted") { var notification = new Notification("Kochiku Build Status", options); } }); } }; Kochiku.graphBuildTimes = function(repositoryPath, branchName) { var url = '/' + repositoryPath + '/' + branchName + '/build-time-history', colors = { cucumber: 'hsl(87, 63%, 47%)', spec: 'hsl(187, 63%, 47%)', jasmine: 'hsl(27, 63%, 47%)', maven: 'hsl(207, 63%, 47%)', unit: 
'hsl(187, 63%, 47%)', integration: 'hsl(87, 63%, 47%)', acceptance: 'hsl(207, 63%, 47%)' }; $.getJSON(url, function(data) { var plot = $('#plot'), series = []; for (var label in data) { var points = data[label].slice(-20), lastTime = null; for (var i = 0; i < points.length; i++) { var ref = $('') .attr('href', '/' + repositoryPath + '/builds/' + points[i][4]) .attr('class', 'build-status ' + points[i][5]) .text(points[i][0]).wrap('
'), time = moment(points[i][6]).calendar().replace(/m$/,''); if (time != lastTime) { ref.after($('
') .text('Minutes (Min to Max)') .appendTo(plot); }); }; (function() { var statuses = [ 'Errored', 'Aborted', 'Failed', 'Running', 'Runnable', 'Passed' ]; $.tablesorter.addParser({ id: 'state', type: 'numeric', is: function(s) { return statuses.indexOf(s) !== -1; }, format: function(s) { return statuses.indexOf(s.replace(/^\s+|\s+$/g, '')); } }); })(); function timeToSeconds(time) { var timeArr = time.split(':') if (timeArr.length === 2) { return Number(timeArr[0])*60 + Number(timeArr[1]) } else { return Number(timeArr[0])*3600 + Number(timeArr[1])*60 + Number(timeArr[2]) } } (function() { $.tablesorter.addParser({ id: 'elapsedTime', type: 'numeric', is: function(s) { return /^([0-9]*:(?=[0-9]{2}:))?([0-5]?[0-9])(:[0-5][0-9])$/.test(s); }, format: function(s) { return timeToSeconds(s); } }); })(); ================================================ FILE: app/assets/stylesheets/screen.sass ================================================ @import compass @import compass/reset @import compass/css3/border-radius /* SQ Market font @font-face font-family: "SQ Market" src: url("/fonts/SQMarket-Regular.otf") format("opentype") /* Variables $font-base: Helvetica Neue, Helvetica, Arial, sans-serif $font-code: Inconsolata, Menlo, Monaco, monospace $color-base: #52585D $color-title: #52585D $color-link: #2D81C5 $color-passed: hsl(90, 90%, 70%) $color-runnable: hsl(216, 40%, 90%) $color-running: hsl(216, 90%, 70%) $color-failed: hsl(0, 90%, 70%) $color-errored: hsl(48, 90%, 70%) $color-aborted: hsl(0, 0%, 70%) /* Mixins =has-layout display: block zoom: 1 =envelope +has-layout position: relative overflow: hidden =safe-envelope +has-layout &:after content: " " display: block height: 0 clear: both overflow: hidden visibility: hidden =text-shadow text-shadow: 0 1px 0 rgba(0, 0, 0, 0.5) =box-sizing($box-sizing: border-box) box-sizing: $box-sizing -webkit-box-sizing: $box-sizing -moz-box-sizing: $box-sizing =border-radius($radius: 0) -webkit-border-radius: $radius -moz-border-radius: 
$radius border-radius: $radius /* Styles html, body height: 100% body, input font-family: $font-base font-size: 12px #page margin: 1em 40px min-height: 100% h1, h2, h3, h4, h5, h6 font-weight: 300 a color: $color-link text-decoration: none strong font-weight: bold code font-family: $font-code input[type=text], input[type=email], input[type=password] font-family: $font-base outline: none !important width: 400px border-radius: 3px -webkit-box-shadow: 0 2px 2px rgba(black, 0.1) inset -moz-box-shadow: 0 2px 2px rgba(black, 0.1) inset border: solid 1px rgba(black, 0.3) padding: 4px color: #202122 &:focus border-color: hsla(207, 63%, 47%, 0.6) input[type=submit], button, .button +border-radius(3px) box-shadow: 0 1px 0 rgba(white, 0.1) inset, 0 0 3px rgba(black, 0.2) border: solid 1px rgba(black, 0.2) background: $color-link linear-gradient(to bottom, transparent, rgba(black, 0.2)) color: white text-shadow: 0 -1px 0 rgba(black, 0.2) padding: 4px 8px a color: white &:active box-shadow: 0 1px 3px rgba(black, 0.2) inset color: rgba(white, 0.95) // Tooltip overrides #tiptip_content font-size: 14px /* Index .section-wrapper overflow: hidden border-bottom: solid 1px #e2e2e2 .section +envelope margin: 1em .flash text-align: center font-size: 14px padding: 10px border-radius: 12px margin-bottom: 10px font-weight: bold &.message background-color: #91f991 &.error background-color: #ff9999 &.warn background-color: #edfa3f #disabled-repo-alert color: #8a6d3b background-color: #fcf8e3 border-color: #faebcc font-size: 14px #header a.logo float: left h1 font-family: "SQ Market", $font-base font-size: 28px height: 37px color: $color-title + #666 b color: $color-title .translation font-size: 14px ul.links display: inline-block li display: inline-block margin-right: 1em form display: inline-block #nav padding-bottom: 30px h2.subheader color: $color-title + #222 font-size: 175% padding: 0 0 25px .info, .actions font-size: 12px font-weight: normal .actions float: right padding-top: 5px form 
display: inline label margin-left: 1em min-width: 0 h3 color: $color-title + #222 font-size: 125% padding: 0 0 10px .info + .info margin-left: 8px padding-left: 8px border-left: solid 1px rgba(0, 0, 0, 0.3) form display: inline-block .log_contents font-family: monospace min-height: 300px white-space: pre-wrap .performance font-size: 1.1em float: right width: 300px text-align: right .great color: $color-passed .decent color: $color-errored .bad color: $color-failed .label cursor: help .build-stats margin-bottom: 2em width: 700px #plot margin-bottom: 2em min-width: 1000px height: 180px .legend table width: auto td padding: 2px .xAxis text-align: center font-size: 12px a font-family: $font-code .build-status position: relative .build-status:before position: absolute left: 50% top: -15px width: 8px height: 8px font-size: 8px margin: 0 margin-left: -5px border-radius: 2px time display: block font-size: 10px margin-top: 2px .axis-label position: absolute color: #666 &.y -webkit-transform: rotate(-90deg) left: -45px top: 45% .flot-overlay, .flot-y-axis // make the x-axis clickable z-index: -1 #tooltip position: absolute display: none border: 1px solid #52585D padding: 2px background-color: #B8BEC3 font-size: 130% opacity: 0.8 .build-state .state font-weight: bold .queue font-weight: bold .build overflow: hidden margin: 0 0 50px .build-wrapper +envelope color: #666 display: block .build-info float: left .ref font-size: 175% font-family: $font-code line-height: 40px letter-spacing: 1px &:hover color: $color-link .times float: right margin: 10px 20px line-height: 15px text-align: right .build-id +text-shadow font-family: $font-base float: right font-size: 400% font-weight: bold text-align: right $part-margin: 2px .parts +envelope margin: 10px (-$part-margin) .part overflow: hidden float: left border: 1px solid rgba(0, 0, 0, 0.1) border-radius: 3px margin: $part-margin box-shadow: 0 1px 1px rgba(0, 0, 0, 0.3) .part.runnable background-color: $color-runnable .part.running 
background-color: $color-running .part.passed background-color: $color-passed .part.failed background-color: $color-failed .part.errored background-color: $color-errored .part.aborted background-color: $color-aborted .part-wrapper +envelope display: block margin: 5px width: 115px height: 25px color: #333 .kind, .attempts display: inline-block .kind font-size: 175% font-weight: bold line-height: 27px .attempts .attempt overflow: hidden .paths-tooltip border: 1px solid rgba(0, 0, 0, 0.5) background-color: #fff color: #333 font-size: 120% font-family: $font-code max-width: 500px !important li margin: 5px 0 .build-status, .part-status, .attempt-status &:before content: " " border: 1px solid rgba(black, 0.4) border-radius: 3px display: inline-block width: 10px height: 10px text-align: center margin-right: 2px box-shadow: 0 1px 1px rgba(black, 0.1) font-size: 10px font-family: verdana, sans-serif font-weight: bold color: rgba(black, 0.4) &.runnable:before, &.partitioning:before background-color: $color-runnable &.running:before background-color: $color-running &.failed:before, &.errored:before, &.aborted:before, &.doomed:before content: "!" 
&.failed:before, &.doomed:before background-color: $color-failed &.errored:before background-color: $color-errored &.aborted:before background-color: $color-aborted &.passed:before, &.succeeded:before background-color: $color-passed h2 .build-status:before vertical-align: middle /* New build in header .header-right float: right line-height: 32px margin-top: 5px input.ref margin: 0 width: 200px font-size: 100% /* New build page input.ref width: 500px font-family: $font-code font-size: 150% input.build-button font-size: 120% input[disabled] background-color: #e8e8e8 background-image: linear-gradient(#fcfcfc,#e8e8e8) border: 1px solid #e8e8e8 border-top-color: #e8e8e8 color: #999 /* health pretends to be a button .health float: right margin: 8px 5px table table-layout: fixed width: 100% tr:nth-child(even) background-color: rgba(0, 0, 0, 0.05) tbody tr:hover background-color: #e4edfa !important th font-weight: bold border-bottom: 1px solid rgba(0, 0, 0, 0.1) padding: 0 5px 5px cursor: pointer white-space: nowrap &.headerSortDown:after content: " ▲" font-size: 8px &.headerSortUp:after content: " ▼" font-size: 8px code font-size: 15px &.id width: 50px &.whisker width: 45px &.queue-position width: 50px &.status width: 70px &.ruby-version width: 100px &.type width: 156px &.worker width: 156px &.time width: 80px &.count width: 50px &.actions width: 70px &.first-path width: 100px &.date width: 180px td padding: 5px white-space: nowrap overflow-x: scroll /* Fix chrome 67 formating issue */ position: relative &::-webkit-scrollbar display: none &.whisker padding-right: 2px .part-status:before content: "" width: 1px height: 10px margin-right: -1px border-color: rgba(black, 0.2) .part-status.attempt-0:before height: 1px border-color: transparent -webkit-box-shadow: none .part-status.attempt-1:before height: 1px .part-status.attempt-2:before height: 3px .part-status.attempt-3:before height: 5px .part-status.attempt-4:before height: 7px .part-status.attempt-5:before height: 9px 
th.right, td.right text-align: right td .paths color: #999 .root color: black td.wrap white-space: normal line-height: 1.6 tr:target td background-color: #ffc /* Search by build revision .select_commit margin-bottom: 20px #build_ref_input width: 100px /* Build show .build-info-subheader margin: (-15px) 0 25px .build-summary margin-bottom: 10px /* Build parts show table.build-part-info margin-bottom: 20px ol#build-paths margin-bottom: 20px list-style: square inside none line-height: 150% .new-repository-link padding-bottom: 20px .projects-grid text-align: center .projects +pie-clearfix margin-right: 0.75em margin-left: 0.75em li.build-info margin: 0.3em 0 &.bold font-weight: bold .ci-build-info position: relative display: inline-block width: 188px height: 188px border-radius: 10px text-align: center vertical-align: top margin: 0.5em color: rgba(black, 0.8) text-shadow: rgba(white, 0.1) 0 1px 0 +box-shadow(0 1px 2px rgba(black, 0.5), inset 0 0 3px rgba(black, 0.3)) a color: rgba(white, 0.95) text-shadow: rgba(black, 0.2) 0 -1px 0 .project-name font-size: 26px font-weight: 300 a display: block padding: 55px 0 10px letter-spacing: 0.05em .state padding-top: 6px .project-link display: none position: absolute bottom: 10px width: 100% &:hover .project-link display: block .ci-errored, .ci-doomed, .ci-failed background-color: desaturate(darken($color-failed, 10%), 20%) font-weight: bold .ci-partitioning, .ci-runnable, .ci-running background-color: desaturate(darken($color-running, 10%), 20%) .ci-succeeded background-color: desaturate(darken($color-passed, 20%), 20%) .ci-unknown, .ci-aborted background-color: desaturate(darken($color-aborted, 10%), 20%) font-weight: bold /* Repository form styles fieldset legend font-weight: bold padding: 0.5em 0 margin: 1em 0 #repository-form label display: inline-block width: 220px // Give more spacing to the checkboxes #repository-form > div, #repository-form fieldset > div min-height: 28px #repository-form input[type='checkbox'] 
margin-top: 8px #repository-form .short width: 150px .delete-form float: right margin-top: -28px .danger-button background-color: $color-failed !important #branch-delete-warning color: #B8BEC3 /* END Repository form styles .build-error pre font-family: monospace background-color: rgba(0, 0, 0, 0.05) padding: 1em margin-bottom: 2em h2 padding: 0.5em font-size: 1.5em font-weight: bolder color: darken($color-failed, 20%) .build-empty div display: block padding: 1.2em margin-bottom: 2em background-color: rgba(0, 0, 0, 0.05) h2 padding: 0.5em font-size: 1.5em font-weight: bolder .hint font-style: italic p line-height: 1.4em #worker-health-wrap overflow: auto .worker-health table-layout: auto // Firefox doesn't support a fixed table with // a scroll overflow, so just hide the overflow on Firefox. // It also doesn't react to just overflow-x, we need to hide // overflow in both axes @-moz-document url-prefix() table td overflow: hidden ================================================ FILE: app/controllers/application_controller.rb ================================================ class ApplicationController < ActionController::Base include BuildHelper rescue_from ActiveRecord::RecordNotFound do |exception| render file: "#{Rails.public_path}/404.html", layout: false, status: 404 end end ================================================ FILE: app/controllers/branches_controller.rb ================================================ class BranchesController < ApplicationController caches_action :show, :build_time_history, cache_path: proc { load_repository_and_branch { :modified => [@branch.updated_at.to_i, @repository.updated_at.to_i].max } } caches_action :status_report, expires_in: 15.seconds # lists all convergence branches as well the 100 most recently active # branches def index load_repository @convergence_branches = @repository.branches.where(convergence: true) @recently_active_branches = @repository.branches.where(convergence: false).order('updated_at DESC').limit(100) 
end

# GET /:repository_path/:id — shows the branch's 12 most recent builds plus
# a new (unsaved) build object for the "request build" form. @build_parts
# groups each build's parts by [first path, kind, ruby option] so the view
# can render a given part's history across builds.
def show
  load_repository_and_branch
  @build = @branch.builds.build
  @builds = @branch.builds.includes(build_parts: :build_attempts).last(12)
  @current_build = @builds.last

  @build_parts = {}
  @builds.reverse_each do |build|
    build.build_parts.each do |build_part|
      key = [build_part.paths.first, build_part.kind, build_part.options['ruby']]
      (@build_parts[key] ||= {})[build] = build_part
    end
  end

  @branch = @branch.decorate

  respond_to do |format|
    format.html
    # Note: appending .rss to the end of the url will not return RSS format
    # due to the very permissive branch id constraint. Instead users will
    # have to specify a query param of format=rss to receive the RSS feed.
    format.rss { @builds = @builds.reverse } # most recent first
    format.json
  end
end

# Starts a build of the branch's current HEAD commit, unless a build for
# that commit already exists. Redirects back with a flash in either case.
def request_new_build
  load_repository_and_branch

  ref = @repository.sha_for_branch(@branch.name)
  existing_build = @repository.build_for_commit(ref)

  if existing_build.present?
    flash[:warn] = "Did not find a new commit on the #{@branch.name} branch to build"
    redirect_to repository_branch_path(@repository, @branch)
  else
    build = @branch.builds.build(ref: ref, state: 'partitioning')
    if build.save
      flash[:message] = "New build started for #{build.ref} on #{@branch.name}"
      redirect_to repository_build_path(@repository, build)
    else
      flash[:error] = "Error adding build! #{build.errors.full_messages.to_sentence}"
      redirect_to repository_branch_path(@repository, @branch)
    end
  end
end

# Aggregates attempt pass/fail counts over the branch's recent builds
# (params[:count], default 12) and ranks build parts by integer error
# rate into @part_climate, keyed by [error_rate, [sorted paths, kind]].
def health
  load_repository_and_branch
  initialize_stats_variables
  load_build_stats

  @builds = @branch.builds.includes(:build_parts => :build_attempts).last(params[:count] || 12)

  build_part_attempts = Hash.new(0)
  build_part_failures = Hash.new(0)
  failed_parts = {}
  @builds.each do |build|
    build.build_parts.each do |build_part|
      key = [build_part.paths.sort, build_part.kind]
      build_part.build_attempts.each do |build_attempt|
        # attempts that are neither successful nor unsuccessful (e.g. still
        # in progress) are excluded from the statistics entirely
        if build_attempt.successful?
          build_part_attempts[key] = build_part_attempts[key] + 1
        elsif build_attempt.unsuccessful?
          build_part_attempts[key] = build_part_attempts[key] + 1
          build_part_failures[key] = build_part_failures[key] + 1
          failed_parts[key] = (failed_parts[key] || []) << build_part
        end
      end
    end
  end

  @part_climate = {}
  failed_parts.each do |key, parts|
    # integer percentage of failed attempts for this part
    part_error_rate = (build_part_failures[key] * 100 / build_part_attempts[key])
    @part_climate[[part_error_rate, key]] = parts.uniq
  end

  @branch = @branch.decorate
end

# JSON-only endpoint backing the build-time history graph.
def build_time_history
  load_repository_and_branch

  respond_to do |format|
    format.json do
      render json: @branch.decorate.build_time_history.to_json
    end
  end
end

# GET /XmlStatusReport.aspx
#
# This action returns the current build status for all of the convergence branches
# in the system
def status_report
  @branches = Branch.includes(:repository).where(convergence: true).decorate
end

# Serves an SVG status badge (passing / failing / pending) for the most
# recent build of the branch named in params[:branch].
def badge
  @repository || load_repository
  @branch ||= @repository.branches.where(name: params[:branch]).first!

  build = @branch.most_recent_build
  if build.succeeded?
    send_file('public/images/passing.svg', type: 'image/svg+xml', disposition: 'inline')
  elsif build.failed?
    send_file('public/images/failing.svg', type: 'image/svg+xml', disposition: 'inline')
  else # in progress
    send_file('public/images/pending.svg', type: 'image/svg+xml', disposition: 'inline')
  end
end

private

# Looks up the repository from the "namespace/name" form of
# params[:repository_path]; first! raises ActiveRecord::RecordNotFound
# (rendered as a 404 by ApplicationController) when absent.
def load_repository
  r_namespace, r_name = params[:repository_path].split('/')
  @repository = Repository.where(namespace: r_namespace, name: r_name).first!
end

# Memoized loader for @repository and @branch (branch name comes from
# params[:id]).
def load_repository_and_branch
  @repository || load_repository
  @branch ||= @repository.branches.where(name: params[:id]).first!
end

# set the various stats variables to reasonable null values in case
# the load_build_stats method short-circuits
def initialize_stats_variables
  @days_since_first_build = 0
  @total_build_count = 0
  @total_failure_count = 0
  @total_pass_rate = '—'
  @last30_build_count = 0
  @last30_failure_count = 0
  @last30_pass_rate = '—'
  @last7_build_count = 0
  @last7_failure_count = 0
  @last7_pass_rate = '—'
end

# Computes all-time / 30-day / 7-day build counts and integer pass rates.
# Each early return leaves the remaining stats at the defaults set by
# initialize_stats_variables.
def load_build_stats
  @first_built_date = @branch.builds.first.try(:created_at)
  return if @first_built_date.nil?

  @days_since_first_build = (Time.zone.today - @first_built_date.to_date).to_i
  @total_build_count = @branch.builds.count
  @total_failure_count = @branch.builds.where.not(state: 'succeeded').count
  @total_pass_rate = (@total_build_count - @total_failure_count) * 100 / @total_build_count

  @last30_build_count = @branch.builds.where('created_at >= ?', Time.zone.today - 30.days).count
  return if @last30_build_count.zero?
  @last30_failure_count = @last30_build_count - @branch.builds.where('state = "succeeded" AND created_at >= ?', Time.zone.today - 30.days).count
  @last30_pass_rate = (@last30_build_count - @last30_failure_count) * 100 / @last30_build_count

  @last7_build_count = @branch.builds.where('created_at >= ?', Time.zone.today - 7.days).count
  return if @last7_build_count.zero?
@last7_failure_count = @last7_build_count - @branch.builds.where('state = "succeeded" AND created_at >= ?', Time.zone.today - 7.days).count @last7_pass_rate = (@last7_build_count - @last7_failure_count) * 100 / @last7_build_count end end ================================================ FILE: app/controllers/build_artifacts_controller.rb ================================================ class BuildArtifactsController < ApplicationController def create @build_artifact = BuildArtifact.new @build_artifact.build_attempt_id = params[:build_attempt_id] @build_artifact.log_file = params[:build_artifact].try(:[], :log_file) respond_to do |format| if @build_artifact.save format.xml { head :created, :location => @build_artifact.log_file.url } else format.xml { render :xml => @build_artifact.errors, :status => :unprocessable_entity } end end end # A redirect is preferable to direct linking if logs are stored remotely with expiring urls. def show build_artifact = BuildArtifact.find(params[:id]) redirect_to build_artifact.log_file.url end end ================================================ FILE: app/controllers/build_attempts_controller.rb ================================================ require 'json' require 'net/http' class BuildAttemptsController < ApplicationController def start @build_attempt = BuildAttempt.find(params[:id]) respond_to do |format| if @build_attempt.aborted? format.json { render :json => @build_attempt } elsif @build_attempt.start!(params[:builder]) @build_attempt.log_streamer_port = params[:logstreamer_port] @build_attempt.instance_type = params[:instance_type] if params[:instance_type].present? 
@build_attempt.save format.json { render :json => @build_attempt } else format.json { render :json => @build_attempt.errors, :status => :unprocessable_entity } end end end def finish @build_attempt = BuildAttempt.find(params[:id]) respond_to do |format| if @build_attempt.finish!(params[:state]) format.json { head :ok } format.html do redirect_to repository_build_part_url(@build_attempt.build_part.build_instance.repository, @build_attempt.build_part.build_instance, @build_attempt.build_part) end else format.json { render :json => @build_attempt.errors, :status => :unprocessable_entity } end end end # Redirects to the build_part page since we don't have a page for a single build attempt. # Added as a shortcut method to use when the IDs of the relation chain is not handy. def show @build_attempt = BuildAttempt.find(params[:id]) redirect_to repository_build_part_url( @build_attempt.build_part.build_instance.repository, @build_attempt.build_part.build_instance, @build_attempt.build_part, anchor: helpers.dom_id(@build_attempt)) end def stream_logs @build_attempt = BuildAttempt.find(params[:id]) unless @build_attempt.log_streamer_port || @build_attempt.builder render plain: "No log streaming available for this build attempt", status: 404 end # if full log has already been uploaded, redirect there if (stdout_log = @build_attempt.build_artifacts.stdout_log.try(:first)) redirect_to stdout_log return end @build = @build_attempt.build_instance @repository = @build.repository @build_part = @build_attempt.build_part end # basically proxies request to the appropriate worker def stream_logs_chunk @build_attempt = BuildAttempt.find(params[:id]) start = params.fetch(:start, 0) max_bytes = params.fetch(:maxBytes, 250000) port = @build_attempt.log_streamer_port builder = @build_attempt.builder if !port || !builder render json: {"error" => "No log streaming available for this build attempt"}, status: 500 return end # logstreamer_base_url = "http://#{builder}:#{port}" http = 
Net::HTTP.new(builder, port) http.read_timeout = 5 response = begin http.get("/build_attempts/#{@build_attempt.id}/log/stdout.log?start=#{start}&maxBytes=#{max_bytes}") rescue false end if !response || response.code !~ /^2/ render json: {"error" => "unable to reach log streamer"}, status: 500 return end output_json = JSON.parse(response.body) output_json['state'] = @build_attempt.state render json: output_json end end ================================================ FILE: app/controllers/build_parts_controller.rb ================================================ class BuildPartsController < ApplicationController before_action :load_repository_build_and_part, only: [:rebuild, :show, :modified_time, :refresh_build_part_info] before_action only: [:show, :refresh_build_part_info] do calculate_build_attempts_position(@build_part.build_attempts, @build_part.queue) end include BuildAttemptsQueuePosition caches_action :show, cache_path: proc { { modified: [@build_part.updated_at.to_i, @repository.updated_at.to_i].max, queue_position: Digest::SHA1.hexdigest(@build_attempts_rank.values.join(',')) } } def show respond_to do |format| format.html format.json do render :json => @build_part, include: { build_attempts: { methods: :files } } end end end def rebuild begin @build_part.rebuild! rescue GitRepo::RefNotFoundError flash[:error] = "It appears the commit #{@build.ref} no longer exists." 
end redirect_to [@repository, @build] end def modified_time respond_to do |format| format.json do render :json => @build_part.updated_at end end end def refresh_build_part_info updates = [] if @build_part.finished_at updates << { state: @build_part.status } else @build_part.build_attempts.each_with_index do |attempt, index| html = ApplicationController.render(partial: 'build_parts/build_attempts', locals: { index: index, attempt: attempt, build_attempts_rank: @build_attempts_rank}) updates << {id: index, content: html, state: @build_part.status} end end respond_to do |format| format.json do render :json => updates end end end private def load_repository_build_and_part r_namespace, r_name = params[:repository_path].split('/') @repository = Repository.where(namespace: r_namespace, name: r_name).first! @build = Build.joins(:branch_record).where('branches.repository_id' => @repository.id).find(params[:build_id]) @build_part = @build.build_parts.find(params[:id]) end end ================================================ FILE: app/controllers/builds_controller.rb ================================================ require 'git_repo' class BuildsController < ApplicationController before_action :load_repository, :only => [:show, :retry_partitioning, :rebuild_failed_parts, :request_build, :abort, :toggle_merge_on_success, :build_status, :modified_time, :refresh_build_part_info, :resend_status] before_action only: [:show, :refresh_build_part_info] do @build = Build.includes(build_parts: :build_attempts) .joins(:branch_record).where('branches.repository_id' => @repository.id) .find(params[:id]) calculate_build_parts_position(@build) format_build_parts_position end include BuildAttemptsQueuePosition caches_action :show, cache_path: proc { updated_at = Build.select(:updated_at).find(params[:id]).updated_at { build_modified: [updated_at.to_i, @repository.updated_at.to_i].max, queue_position: Digest::SHA1.hexdigest(@build_attempts_rank.values.join(',')) } } def show respond_to do 
|format| format.html format.json { render :json => @build, include: { build_parts: { methods: [:status] } } } format.png do # See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.21 headers['Expires'] = CGI.rfc1123_date(Time.now.utc) send_data(@build.to_png, :type => 'image/png', :disposition => 'inline') end end end # Public: Kickoff a build from the kochiku CLI script # # repo_url - The remote url for the git repository # git_sha - (optional) the SHA of the specific git commit the user is requesting to build # git_branch - String name of the git branch to perform the build of. If # 'git_sha' is not specified then it will use HEAD of the git branch. # merge_on_success - Bool. Request kochiku automatically merge the branch if the build succeeds. # def create merge_on_success = (params[:merge_on_success] || false) repository = Repository.lookup_by_url(params[:repo_url]) unless repository raise ActiveRecord::RecordNotFound, "Repository for #{params[:repo_url]} not found" end if params[:git_sha].present? build = repository.build_for_commit(params[:git_sha]) if build head :ok, :location => repository_build_url(repository, build) return end end branch = repository.branches.where(name: params[:git_branch]).first_or_create! ref_to_build = if params[:git_sha].present? params[:git_sha] else repository.sha_for_branch(branch.name) end build = branch.builds.build(ref: ref_to_build, state: 'partitioning', merge_on_success: merge_on_success) if build.save head :ok, :location => repository_build_url(repository, build) else render :plain => build.errors.full_messages.join('\n'), :status => :unprocessable_entity end end def retry_partitioning @build = Build.joins(:branch_record).where('branches.repository_id' => @repository.id).find(params[:id]) # This means there was an error with the partitioning job; redo it if @build.build_parts.empty? @build.update_attributes! 
:state => 'partitioning', :error_details => nil @build.enqueue_partitioning_job end redirect_to [@repository, @build] end def rebuild_failed_parts @build = Build.includes(build_parts: :build_attempts) .joins(:branch_record).where('branches.repository_id' => @repository.id) .find(params[:id]) @build.build_parts.failed_errored_or_aborted.each do |part| # There is an exceptional case in Kochiku where a build part's prior attempt may have # passed but the latest attempt failed. We do not want to rebuild those parts. part.rebuild! if part.unsuccessful? end @build.update_attributes! state: 'running' redirect_to [@repository, @build] end def abort @build = Build.joins(:branch_record).where('branches.repository_id' => @repository.id).find(params[:id]) @build.abort! redirect_to repository_build_path(@repository, @build) end def toggle_merge_on_success @build = Build.joins(:branch_record).where('branches.repository_id' => @repository.id).find(params[:id]) @build.update_attributes!(:merge_on_success => params[:merge_on_success]) redirect_to repository_build_path(@repository, @build) end def build_status @build = @repository ? @repository.builds.find(params[:id]) : Build.find(params[:id]) respond_to do |format| format.json do render :json => @build end end end def modified_time updated_at = Build.joins(:branch_record).where('branches.repository_id' => @repository.id) .find(params[:id]).updated_at respond_to do |format| format.json do render :json => updated_at end end end def resend_status @build = Build.joins(:branch_record).where('branches.repository_id' => @repository.id).find(params[:id]) BuildStateUpdateJob.enqueue(@build.id) redirect_to repository_build_path(@repository, @build) end def refresh_build_part_info updates = [] last_modified = Time.zone.at(params[:modified_time].to_i / 1000.0) if @build.completed? updates << { state: @build.state } else updatd_parts = @build.build_parts.where("updated_at > ? 
OR id in (?)", last_modified, @build_parts_position.keys) updatd_parts.each do |part| html = ApplicationController.render(partial: 'builds/build_parts', locals: { part: part.decorate, build: @build, build_parts_position: @build_parts_position, repository: @repository }) updates << {id: part.id, content: html, state: @build.state} end end respond_to do |format| format.json do render :json => updates end end end def build_redirect build_instance = Build.find(params[:id]) redirect_to repository_build_path(build_instance.repository, build_instance) end def build_ref_redirect # search prefix so that entire git ref does not have to be provided. build_instance = Build.where("ref LIKE ?", "#{params[:ref]}%").first redirect_to repository_build_path(build_instance.repository, build_instance) end private def load_repository if params[:repository_path] r_namespace, r_name = params[:repository_path].split('/') @repository = Repository.where(namespace: r_namespace, name: r_name).first! end end def format_build_parts_position @build_parts_position = {} @build_attempts_rank&.each do |build_attempt_id, position| next if position.nil? build_part_id = BuildAttempt.find(build_attempt_id).build_part.id if @build_parts_position[build_part_id].nil? @build_parts_position[build_part_id] = position elsif @build_parts_position[build_part_id] > position @build_parts_position[build_part_id] = position end end end end ================================================ FILE: app/controllers/concerns/build_attempts_queue_position.rb ================================================ module BuildAttemptsQueuePosition extend ActiveSupport::Concern # keep_rank is only true if we are calling calculate_build_attempts_position multiple times on the # same build because that build has multiple queues def calculate_build_attempts_position(build_attempts, queue, keep_rank: false) @build_attempts_rank = {} unless keep_rank jobs = Resque.redis.lrange("queue:#{queue}", 0, -1) return if jobs.blank? 
build_attempts&.each do |build_attempt| next unless build_attempt.state == 'runnable' id = build_attempt.id.to_s @build_attempts_rank[id] = jobs.index { |job| /"build_attempt_id\":#{id}/.match(job) } end end def calculate_build_parts_position(build) @build_attempts_rank = {} parts_by_queue = Hash.new([]) build_attempts = build.build_attempts.includes(:build_part).where(state: 'runnable') build_attempts.each do |attempt| parts_by_queue[attempt.build_part.queue] += [attempt] end parts_by_queue.each do |queue, attempts| calculate_build_attempts_position(attempts, queue, keep_rank: true) end end end ================================================ FILE: app/controllers/dashboards_controller.rb ================================================ class DashboardsController < ApplicationController def build_history_by_worker build_attempts = BuildAttempt.where("builder IS NOT NULL").order('id DESC').limit(params[:count] || 2000).select(:id, :builder, :state) @workers = build_attempts.group_by { |ba| ba.builder } @partition_jobs = Build.order('id DESC').limit(150).select(:id, :state).includes(:build_parts) end end ================================================ FILE: app/controllers/pull_requests_controller.rb ================================================ require 'remote_server' class PullRequestsController < ApplicationController def build if params['payload'] # from stash handle_stash_request(JSON.parse(params['payload'])) else # from github handle_github_request(params) end render json: {message: "Thanks!"} end def handle_stash_request(payload) @repo = get_repo(payload['repository']['url']) if payload['pull_request'] && active_pull_request?(payload['action']) branch_name = get_branch_name(payload['pull_request']['head']['ref']) sha = payload['pull_request']['head']['sha'] handle_pull_request(branch_name, sha) elsif payload['ref'] branch_name = get_branch_name(payload['ref']) sha = payload['after'] handle_repo_push_request(branch_name, sha) end end def 
handle_github_request(payload) @repo = get_repo(payload['repository']['ssh_url']) pull_request = payload['pull_request'] if payload['pull_request'] && active_pull_request?(pull_request['state']) branch_name = get_branch_name(pull_request['head']['ref']) sha = pull_request['head']['sha'] handle_pull_request(branch_name, sha) elsif payload['ref'] branch_name = get_branch_name(payload['ref']) sha = payload['head_commit']['id'] handle_repo_push_request(branch_name, sha) end end private def get_repo(url) Repository.lookup_by_url(url) end def handle_repo_push_request(branch_name, sha) return unless @repo if @repo.run_ci? branch = fetch_branch(branch_name) branch.kickoff_new_build_unless_currently_busy(sha) if branch.present? && branch.convergence? end end def handle_pull_request(branch_name, sha) return unless @repo if @repo.build_pull_requests branch = fetch_branch(branch_name, true) build = @repo.ensure_build_exists(sha, branch) branch.abort_in_progress_builds_behind_build(build) end end def get_branch_name(ref) ref.sub(%r{\Arefs/heads/}, '') end def fetch_branch(name, auto_create = false) auto_create ? @repo.branches.where(name: name).first_or_create! : @repo.branches.where(name: name).first end def active_pull_request?(action) action && action != "closed" end end ================================================ FILE: app/controllers/repositories_controller.rb ================================================ class RepositoriesController < ApplicationController def create if params.fetch(:repository)[:url].blank? 
redirect_to new_repository_path, error: "Missing required value: Repository URL" return end @repository = Repository.new(repository_params) # persist the repository and then create initial Branch records for the # convergence branches if @repository.save && update_convergence_branches redirect_to repository_branches_path(@repository) else @current_convergence_branches = params.fetch(:convergence_branches, "").split(',') render template: 'repositories/new' end end def new @repository = Repository.new @repository.run_ci = true @current_convergence_branches = ['master'] end def destroy ActiveRecord::Base.no_touching do Repository.destroy(params[:id]) end redirect_to repositories_path end def update @repository = Repository.find(params[:id]) if @repository.update_attributes(repository_params) && update_convergence_branches flash[:message] = "Settings updated." redirect_to repository_edit_url(@repository) else @current_convergence_branches = params.fetch(:convergence_branches, "").split(',') render template: 'repositories/edit' end end def edit r_namespace, r_name = params[:repository_path].split('/') @repository = Repository.where(namespace: r_namespace, name: r_name).first! @current_convergence_branches = @repository.branches.where(convergence: true).select(:name).collect(&:name) end def index @repositories = Repository.all end def dashboard @branches = Branch.joins(:repository) .includes(:repository) .where(name: 'master') .order('repositories.name') .decorate end # build_ref is intended to be used by the Stash webhooks plugin # https://marketplace.atlassian.com/plugins/com.atlassian.stash.plugin.stash-web-post-receive-hooks-plugin def build_ref repository = Repository.find(params[:id]) # Query string parameters are provided for easy integrations, since it the # simplest to implement. 
changes = if params[:refChanges] params[:refChanges].map do |change| [ change[:refId].gsub(/^refs\/heads\//, ''), change[:toHash] ] end else [params.values_at(:ref, :sha)] end result = changes.map do |ref, sha| ensure_build(repository, ref, sha) end render json: { builds: result.map { |build| { id: build.id, build_url: repository_build_url(repository, build) } } } end def ensure_build(repository, branch_name, sha) branch = repository.branches.where(name: branch_name).first_or_create! build = repository.ensure_build_exists(sha, branch) branch.abort_in_progress_builds_behind_build(build) unless branch.convergence? build end private def repository_params params.require(:repository) .permit(:enabled, :url, :timeout, :build_pull_requests, :run_ci, :on_green_update, :send_build_success_email, :send_build_failure_email, :allows_kochiku_merges, :email_on_first_failure, :send_merge_successful_email, :assume_lost_after) end # update_convergence_branches is called by both create and update. This # method does more work than is necessary for create but it is used to avoid # duplicating code. def update_convergence_branches new_branch_names = params.fetch(:convergence_branches, "").split(',').map(&:strip) current_convergence_branches = @repository.branches.where(convergence: true).all current_branch_names = current_convergence_branches.collect(&:name) remove_convergence_from = current_branch_names - new_branch_names add_convergence_to = new_branch_names - current_branch_names remove_convergence_from.each do |name| current_convergence_branches.detect { |branch| branch.name == name }.update!(convergence: false) end add_convergence_to.each do |name| branch = @repository.branches.where(name: name).first_or_create! 
branch.update!(convergence: true) end true end end ================================================ FILE: app/controllers/status_controller.rb ================================================ class StatusController < ApplicationController def available if File.exist?(Rails.root.join("tmp/maintenance")) head :service_unavailable else head :ok end end end ================================================ FILE: app/decorators/branch_decorator.rb ================================================ require 'set' class BranchDecorator < Draper::Decorator delegate_all def most_recent_build_state object.most_recent_build.try(:state) || 'unknown' end def last_build_duration object.last_completed_build.try(:elapsed_time) end # Recent build timing information grouped by test types. def build_time_history(fuzzy_limit = 1000) result = Hash.new { |hash, key| hash[key] = [] } builds = {} build_types = Set.new object.timing_data_for_recent_builds.each do |timing_data| next if timing_data.empty? build_type = timing_data.shift # the type of test that was executed (e.g. 
cucumber) build_id = timing_data[4] # e.g 65874 build_types.add(build_type) builds[build_id] ||= {} builds[build_id][build_type] = timing_data end builds.keys.sort.each do |build| build_types.each do |build_type| timing_data = builds[build][build_type] || [] # jquery.flot dislikes missing data result[build_type] << timing_data end end result end end ================================================ FILE: app/decorators/build_part_decorator.rb ================================================ class BuildPartDecorator < Draper::Decorator delegate_all def most_recent_stdout_artifact BuildArtifact .joins(:build_attempt => :build_part) .where( 'build_attempts.build_part_id' => object.id, 'build_attempts.state' => BuildAttempt::COMPLETED_BUILD_STATES ).stdout_log.last end end ================================================ FILE: app/helpers/application_helper.rb ================================================ module ApplicationHelper def duration_strftime(duration_in_seconds, format = "%H:%M:%S") return "N/A" if duration_in_seconds.nil? || (duration_in_seconds.respond_to?(:nan?) && duration_in_seconds.nan?) 
(Time.mktime(0) + duration_in_seconds).strftime(format).sub(/^00[ :h]+0?/, "") end def time_for(time, format = "%H:%M") time.strftime(format) end def build_success_in_words(build) case build.state when 'succeeded' 'success' when 'errored', 'doomed' 'failed' else build.state.to_s end end def build_activity(build) return "Unknown" unless build.is_a?(Build) case build.state when 'partitioning', 'runnable', 'running' "Building" when 'doomed', 'failed', 'succeeded', 'errored' "CheckingModifications" end end def link_to_commit(repo, commit_sha) link_to(commit_sha[0, 7], show_link_to_commit(repo, commit_sha)) end def link_to_branch(build) branch_record = build.branch_record branch_name = branch_record.name link_to(branch_name, branch_record.repository.get_branch_url(branch_name)) end def show_link_to_commit(repo, commit_sha) repo.remote_server.href_for_commit(commit_sha).to_s end def show_link_to_compare(build, first_commit_hash, second_commit_hash) repo = build.repository attrs_from_remote_server = RemoteServer.for_url(repo.url) if attrs_from_remote_server.class == RemoteServer::Stash second_commit_hash = repo.on_green_update.blank? ? 
"" : repo.on_green_update.split(',').first end attrs_from_remote_server.url_for_compare(first_commit_hash, second_commit_hash) end def show_link_to_create_pull_request(build) build.repository.open_pull_request_url(build.branch_record.name) end def timeago(time, options = {}) options[:class] ||= "timeago" content_tag(:abbr, time.to_s, options.merge(:title => time.getutc.iso8601)) if time end end ================================================ FILE: app/helpers/build_helper.rb ================================================ module BuildHelper def build_metadata_headers(build, display_ruby_version) headers = [] headers << "Ruby Version" if display_ruby_version if is_a_build_with_one_part?(build) headers << "Target" else headers << "Paths" end headers end def build_metadata_values(build, build_part, display_ruby_version) values = [] values << build_part.options["ruby"] if display_ruby_version values << format_paths(build_part) values end def format_paths(build_part) if build_part.options['total_workers'] && build_part.options['worker_chunk'] build_part.paths.first + " - Chunk #{build_part.options['worker_chunk']} of #{build_part.options['total_workers']}" elsif build_part.paths.size == 1 if build_part.paths.first == "/dev/null" build_part.kind else build_part.paths.first end else first, *rest = build_part.paths first = first.sub(/([^\/]+)/, '\1') paths = [first, rest].join(', ') "#{build_part.paths.length} (#{paths})".html_safe end end def multiple_ruby_versions?(build) build.build_parts.map { |bp| bp.options['ruby'] }.compact.uniq.size > 1 end def is_a_build_with_one_part?(build) build.build_parts.none? { |build_part| build_part.paths.size > 1 } end def eligible_for_merge_on_success?(build) !build.succeeded? && !build.branch_record.convergence? && build.repository.allows_kochiku_merges? 
end end ================================================ FILE: app/helpers/mail_helper.rb ================================================ module MailHelper def failed_build_part_sentence(build_part) stdout_log = build_part.most_recent_stdout_artifact str = "failed after #{build_part.elapsed_time.to_i / 60} minutes" if stdout_log str += ", for details you can go directly to the #{link_to('stdout', build_artifact_url(stdout_log))} log." end str.html_safe end def failed_build_paths(build_part) paths = build_part.paths str = if build_part.kind.include?('spec') paths.map { |path| path.split('/').last } else paths end str.join(', ').truncate(200) end end ================================================ FILE: app/helpers/project_stats_helper.rb ================================================ module ProjectStatsHelper def pass_rate_css_class(rate) case rate.to_i when 0..39 then 'bad' when 40..75 then 'decent' else 'great' end end def rebuild_count_css_class(attempts) case attempts when 0..1 then 'great' when 1..4 then 'decent' else 'bad' end end # A string representing the percentage of builds that eventually passed def eventual_pass_rate(builds) pass_rate_text(builds.select(&:succeeded?).size / builds.size.to_f) end # A string representing the percentage of the builds that had # all tests pass on the first try. def error_free_pass_rate(builds) error_free_count = builds.to_a.count do |build| build.succeeded? && build.build_parts.all_passed_on_first_try? end total_count = builds.to_a.count(&:completed?) pass_rate_text(error_free_count / total_count.to_f) end def pass_rate_text(number) format("%1.0f%%", 100 * number) end # Calculates the average number of rebuilds required before builds succeed. # Only considers builds that are successful because builds that are not yet # successful would skew the calculation. def average_number_of_rebuilds(builds) successful_builds = builds.select(&:succeeded?) 
total_build_parts, total_build_attempts = 0, 0 successful_builds.each do |build| total_build_attempts += build.build_attempts.count total_build_parts += build.build_parts.count end (total_build_attempts - total_build_parts) / successful_builds.size.to_f end def median_elapsed_time(builds) successful_builds = builds.select(&:succeeded?) elapsed_times = successful_builds.map { |build| build.elapsed_time || 0 } times = elapsed_times.length if times.zero? nil else elapsed_times.sort! (elapsed_times[(times - 1) / 2] + elapsed_times[times / 2]) / 2.0 end end def seconds_to_minutes(seconds) (seconds / 60).round if seconds.is_a?(Numeric) end end ================================================ FILE: app/jobs/build_attempt_job.rb ================================================ require 'job_base' # Keep this interface so we can easily enqueue new jobs. # The job is handled by kochiku-worker class BuildAttemptJob < JobBase class WrongBuildAttemptJobClassError < StandardError; end def initialize(build_options) raise WrongBuildAttemptJobClassError, "BuildAttemptJob was processed by the BuildAttemptJob shim in Kochiku instead of real class in Kochiku-worker." end def perform end end ================================================ FILE: app/jobs/build_initiated_by_job.rb ================================================ require 'job_base' require 'git_repo' class BuildInitiatedByJob < JobBase extend Resque::Plugins::Retry @queue = :low @retry_limit = 5 @retry_exceptions = {GitRepo::RefNotFoundError => [60, 60, 60, 180, 360], Cocaine::ExitStatusError => [30, 60, 60, 60, 60] } def initialize(build_id) @build = Build.find(build_id) end def perform return if @build.initiated_by email = GitBlame.last_email_in_branch(@build).first if email.present? 
@build.update_attributes(initiated_by: email) end end end ================================================ FILE: app/jobs/build_partitioning_job.rb ================================================ require 'job_base' require 'git_repo' require 'partitioner' class BuildPartitioningJob < JobBase extend Resque::Plugins::Retry @queue = :partition @retry_limit = 5 @retry_exceptions = {GitRepo::RefNotFoundError => [60, 60, 60, 180, 360], Cocaine::ExitStatusError => [30, 60, 60, 60, 60] } def initialize(build_id) @build = Build.find(build_id) end def perform if @build.test_command.blank? error_message = "No test_command specified in kochiku.yml." @build.update!(:error_details => { :message => error_message, :backtrace => nil }, :state => 'errored') else partitioner = Partitioner.for_build(@build) parts = partitioner.partitions if parts.empty? && partitioner.partitioner_type == "Go" @build.update!(:state => 'succeeded') else @build.partition(parts) end end @build.update_commit_status! end def on_exception(e) if self.class.retry_exception?(e) && !self.class.retry_limit_reached? @build.update_attributes!(:state => :waiting_for_sync) else @build.update_attributes!( :state => 'errored', :error_details => { :message => e.to_s, :backtrace => e.backtrace.join("\n") } ) @build.update_commit_status! end super end end ================================================ FILE: app/jobs/build_state_update_job.rb ================================================ require 'job_base' require 'git_repo' require 'github_commit_status' # this job updates the remote repo. it is enqueued when a build's state changes. class BuildStateUpdateJob < JobBase @queue = :high def initialize(build_id) @build_id = build_id end def perform build = Build.find(@build_id) # notify github/stash that the build status has changed build.update_commit_status! # trigger another build for this branch if there is unbuilt commits if build.branch_record.convergence? && build.completed? 
sha = build.repository.sha_for_branch(build.branch_record.name) build.branch_record.kickoff_new_build_unless_currently_busy(sha) end build.send_build_status_email! if build.succeeded? if !build.on_success_script_log_file.present? && build.on_success_script.present? BuildStrategy.run_success_script(build) end end if build.promotable? build.promote! elsif build.merge_on_success_enabled? if build.mergable_by_kochiku? # ACHTUNG merge to master isn't right anymore. This part my have been changed by shenil build.merge_to_master! else Rails.logger.warn("Build #{build.id} has merge_on_success enabled but cannot be merged.") end end end end ================================================ FILE: app/jobs/enforce_timeouts_job.rb ================================================ # The EnforceTimeoutsJob searches for BuildAttempts that were picked up by a # kochiku worker but never heard back from again. It compares (Time.now - # started_at) against the timeout value of the repository. If the maximum time # has elapsed, it will mark the BuildAttempt as errored and kick off a rebuild. class EnforceTimeoutsJob def self.perform # The EnforceTimeoutsJob runs frequently so we do not check BuildAttempts greater than 1 day old BuildAttempt.where("created_at > ? 
AND state = 'running' AND started_at IS NOT NULL", 1.day.ago).each do |attempt| lenient_timeout = attempt.build_instance.repository.timeout + 5 if attempt.elapsed_time > lenient_timeout.minutes # Error artifact creation taken from kochiku-worker message = StringIO.new message.puts("This BuildAttempt has not been updated by its worker,\n" \ "and has been running longer then the timeout so it has\n" \ "been considered lost by Kochiku.") message.rewind def message.path 'error.txt' end BuildArtifact.create(:build_attempt_id => attempt.id, :log_file => message) attempt.update!(state: 'errored', finished_at: Time.current) Rails.logger.error "Errored BuildAttempt:#{attempt.id} due to timeout" # Enqueue another BuildAttempt if this is the most recent attempt for the BuildPart part = attempt.build_part part.rebuild! if part.build_attempts.last == attempt end end end end ================================================ FILE: app/jobs/job_base.rb ================================================ class JobBase class << self def enqueue(*args) Resque.enqueue(self, *args) end def enqueue_on(build_queue, *args) Resque::Job.create(build_queue, self, *args) Resque::Plugin.after_enqueue_hooks(self).each do |hook| klass.send(hook, *args) end end def perform(*args) job = new(*args) job.perform rescue => e if job job.on_exception(e) else raise e end end end def on_exception(e) raise e end end ================================================ FILE: app/jobs/poll_repositories_job.rb ================================================ class PollRepositoriesJob def self.perform Repository.where(enabled: true).find_each(batch_size: 10) do |repo| branch = repo.convergence_branches.first || repo.branches.where(name: 'master').first if branch.nil? 
Rails.logger.warn("[PollRepositoriesJob] Could not find a branch to check for repo #{repo.name_with_namespace}") end begin head = repo.sha_for_branch(branch.name) rescue RemoteServer::AccessDenied, RemoteServer::RefDoesNotExist, Zlib::BufError => e Rails.logger.error("[PollRepositoriesJob] Exception #{e} occurred for repo #{repo.id}:#{repo.name_with_namespace}. Automatically setting the repository to disabled.") repo.update!(enabled: false) next end unless repo.build_for_commit(head) branch.builds.create!(ref: head, state: 'partitioning') Rails.logger.info "Build created for #{repo.namespace}/#{repo.name}:#{branch.name} at #{head}" end sleep 0.5 # take a breath end end end ================================================ FILE: app/jobs/timeout_stuck_builds_job.rb ================================================ class TimeoutStuckBuildsJob < JobBase @queue = :high def self.perform clean_lost_builds clean_runnable_not_queued end def self.clean_runnable_not_queued # check for builds in runnable that are no longer in the queue missing = [] BuildAttempt.select("build_attempts.id", " build_parts.queue as queue").joins(:build_part) .where("build_attempts.state = 'runnable' AND build_attempts.created_at < ? AND build_attempts.created_at > ?", 5.minutes.ago, 1.day.ago) .group_by(&:queue) .each do |queue, attempts| current_queue = Resque.redis.lrange("queue:#{queue}", 0, -1).to_s missing += attempts.reject { |attempt| current_queue.match(/build_attempt_id\\*\"\:#{attempt.id}[^0-9]/) } end missing.select! 
{ |build_attempt_partial| BuildAttempt.find(build_attempt_partial.id).state == 'runnable' } missing.each { |build_attempt_partial| BuildAttempt.find(build_attempt_partial.id).finish!('errored') } end def self.clean_lost_builds # check for builds that have hit their assume_lost_after Repository.where("assume_lost_after IS NOT NULL").find_each do |repo| repo.build_attempts.where("build_attempts.state = 'running' AND build_attempts.started_at < ?", repo.assume_lost_after.minutes.ago).each do |build_attempt| build_attempt.finish!('errored') end end end end ================================================ FILE: app/mailers/build_mailer.rb ================================================ class BuildMailer < ActionMailer::Base helper :application, :mail default :from => Proc.new { Settings.sender_email_address } private def pull_request_link(build) @build = build remote_server = @build.repository.remote_server if remote_server.class == RemoteServer::Stash && !@build.branch_record.convergence? begin id, _ = remote_server.get_pr_id_and_version(@build.branch_record.name) return "#{remote_server.base_html_url}/pull-requests/#{id}/overview" rescue RemoteServer::StashAPIError # not all branches will have an open pull request return nil end end nil end public def error_email(build_attempt, error_text = nil) @build_part = build_attempt.build_part @builder = build_attempt.builder @error_text = error_text mail :to => Settings.kochiku_notifications_email_address, :subject => "[kochiku] Build part errored on #{@builder}", :from => Settings.sender_email_address end def build_break_email(build) @build = build # Allow the partitioner to be selective about who is emailed partitioner = Partitioner.for_build(@build) @responsible_email_and_files = partitioner.emails_for_commits_causing_failures @emails = @responsible_email_and_files.keys if @emails.empty? @emails = if @build.branch_record.convergence? 
GitBlame.emails_since_last_green(@build) else GitBlame.emails_in_branch(@build) end end @git_changes = if @build.branch_record.convergence? GitBlame.changes_since_last_green(@build) else GitBlame.changes_in_branch(@build) end @failed_build_parts = @build.build_parts.failed_or_errored.decorate @pr_link = pull_request_link(build) mail :to => @emails, :bcc => Settings.kochiku_notifications_email_address, :subject => "[kochiku] Failure - #{@build.branch_record.name} build for #{@build.repository.name}", :from => Settings.sender_email_address end def build_success_email(build) @build = build @email = GitBlame.last_email_in_branch(@build) @git_changes = GitBlame.changes_in_branch(@build) @pr_link = pull_request_link(build) mail :to => @email, :bcc => Settings.kochiku_notifications_email_address, :subject => "[kochiku] Success - #{@build.branch_record.name} build for #{@build.repository.name}", :from => Settings.sender_email_address end end ================================================ FILE: app/mailers/merge_mailer.rb ================================================ class MergeMailer < ActionMailer::Base helper :application default :from => Proc.new { Settings.sender_email_address } def merge_successful(build, merge_commit, emails, stdout_and_stderr) @build = build @merge_commit = merge_commit @stdout_and_stderr = stdout_and_stderr mail(:to => emails, :bcc => Settings.kochiku_notifications_email_address, :subject => "[kochiku] Merged #{@build.branch_record.name} branch for #{@build.repository.name}") end def merge_failed(build, emails, stdout_and_stderr) @build = build @stdout_and_stderr = stdout_and_stderr mail(:to => emails, :bcc => Settings.kochiku_notifications_email_address, :subject => "[kochiku] Failed to merge #{@build.branch_record.name} branch for #{@build.repository.name}") end end ================================================ FILE: app/models/branch.rb ================================================ class Branch < ActiveRecord::Base belongs_to 
:repository has_many :builds, :dependent => :destroy, :inverse_of => :branch_record validates :name, :presence => true def to_param self.name end def kickoff_new_build_unless_currently_busy(ref) last_build = builds.last if last_build && !last_build.completed? last_build else builds.create_with(state: 'partitioning').find_or_create_by!(ref: ref) end end def abort_in_progress_builds_behind_build(current_build) builds.where(state: Build::IN_PROGRESS_STATES).readonly(false) .reject { |build| build.id >= current_build.id } .each { |build| build.abort! } end def most_recent_build @most_recent_build ||= builds.last end def last_completed_build @last_completed_build ||= builds.completed.last end # The fuzzy_limit is used to set a upper bound on the amount of time that the # sql query will take def timing_data_for_recent_builds(fuzzy_limit = 1000) id_cutoff = builds.maximum(:id).to_i - fuzzy_limit self.class.connection.execute(build_time_history_sql(id_cutoff)) end private def build_time_history_sql(min_build_id) return <<-SQL SELECT build_parts.kind AS kind, SUBSTR(builds.ref, 1, 5) AS ref, IFNULL(FLOOR(ROUND(MAX(UNIX_TIMESTAMP(build_attempts.finished_at) - UNIX_TIMESTAMP(build_attempts.started_at)) / 60)), 0) AS max, IFNULL(FLOOR(ROUND(MAX(UNIX_TIMESTAMP(build_attempts.finished_at) - UNIX_TIMESTAMP(build_attempts.started_at)) / 60)) - FLOOR(ROUND(MIN(UNIX_TIMESTAMP(build_attempts.finished_at) - UNIX_TIMESTAMP(build_attempts.started_at)) / 60)), 0) AS min_diff, 0 AS max_diff, builds.id, builds.state, builds.created_at FROM builds LEFT JOIN build_parts ON build_parts.build_id = builds.id LEFT JOIN build_attempts ON build_attempts.build_part_id = build_parts.id WHERE builds.branch_id = #{id} AND builds.id >= #{min_build_id} AND (build_attempts.id IS NULL OR build_attempts.id = ( SELECT id FROM build_attempts WHERE build_part_id = build_parts.id ORDER BY id DESC LIMIT 1 )) GROUP BY builds.id, build_parts.kind, builds.state, builds.created_at SQL end end 
================================================ FILE: app/models/build.rb ================================================ require 'on_success_uploader' require 'fileless_io' require 'build_partitioning_job' require 'build_initiated_by_job' class Build < ActiveRecord::Base # using 'branch_record' instead of 'branch' because Build has a legacy 'branch' string type column. The legacy column will be removed soon. belongs_to :branch_record, :class_name => "Branch", :foreign_key => "branch_id", :inverse_of => :builds, :touch => true has_one :repository, :through => :branch_record has_many :build_parts, :dependent => :destroy, :inverse_of => :build_instance do def not_passed_and_last_attempt_in_state(*state) joins(:build_attempts).joins(<<-EOSQL).where("build_attempts.state" => state, "passed_attempt.id" => nil, "newer_attempt.id" => nil) LEFT JOIN build_attempts AS passed_attempt ON build_attempts.build_part_id = passed_attempt.build_part_id AND passed_attempt.state = 'passed' LEFT JOIN build_attempts AS newer_attempt ON build_attempts.build_part_id = newer_attempt.build_part_id AND newer_attempt.id > build_attempts.id EOSQL end def passed joins(:build_attempts).where("build_attempts.state" => 'passed').group("build_parts.id") end def failed not_passed_and_last_attempt_in_state('failed') end def failed_or_errored not_passed_and_last_attempt_in_state('failed', 'errored') end def failed_errored_or_aborted not_passed_and_last_attempt_in_state('failed', 'errored', 'aborted') end def errored not_passed_and_last_attempt_in_state('errored') end def all_passed_on_first_try? 
successful_build_attempts = joins(:build_attempts).where("build_attempts.state" => 'passed').count unsuccessful_build_attempts = joins(:build_attempts).where("build_attempts.state != ?", 'passed').count successful_build_attempts > 0 && unsuccessful_build_attempts == 0 end end has_many :build_attempts, :through => :build_parts TERMINAL_STATES = %w[failed succeeded errored aborted].freeze FAILED_STATES = %w[failed errored doomed].freeze IN_PROGRESS_STATES = %w[waiting_for_sync partitioning runnable running doomed].freeze STATES = IN_PROGRESS_STATES + TERMINAL_STATES validates :state, inclusion: { in: STATES } serialize :error_details, Hash serialize :kochiku_yml_config, Hash validates :branch_id, presence: true validates :ref, presence: true, length: { is: 40, allow_blank: true }, uniqueness: { scope: :branch_id, allow_blank: true } mount_uploader :on_success_script_log_file, OnSuccessUploader after_commit :enqueue_partitioning_job, :on => :create after_commit :enqueue_initiated_by, on: :create scope :completed, -> { where(state: TERMINAL_STATES) } def test_command tc = self[:test_command] if tc.nil? tc = (kochiku_yml && kochiku_yml.key?('test_command')) ? kochiku_yml['test_command'] : repository.test_command self.update_attributes(test_command: tc) end tc end def on_success_script (kochiku_yml && kochiku_yml.key?('on_success_script')) ? kochiku_yml['on_success_script'] : nil end def previous_build branch_record.builds.where("id < ?", self.id).order("id DESC").first end def previous_successful_build Build.where(branch_id: self.branch_id, state: 'succeeded').where("id < ?", self.id).order("id DESC").first end def enqueue_partitioning_job Resque.enqueue(BuildPartitioningJob, self.id) if repository.enabled? end def kochiku_yml if @kochiku_yml.nil? # try to load the kochiku.yml info from the build's repo, if it is not in the DB already update_attributes!(kochiku_yml_config: GitRepo.load_kochiku_yml(repository, ref)) if kochiku_yml_config.empty? 
# if there's actually no kochiku.yml file for the build, the kochiku_yml_config # attribute would still be empty, even after the above update. @kochiku_yml = kochiku_yml_config.empty? ? false : kochiku_yml_config else @kochiku_yml end end def partition(parts) return unless repository.enabled? transaction do update_attributes!(:state => 'runnable') parts.each do |part| build_parts.create!(:kind => part['type'], :paths => part['files'], :queue => part['queue'], :retry_count => part['retry_count'], :options => part['options']) end end build_parts.each { |build_part| build_part.create_and_enqueue_new_build_attempt! } end def update_state_from_parts! return if build_parts.empty? errored = build_parts.errored passed = build_parts.passed failed = build_parts.failed next_state = case when (build_parts - passed).empty? 'succeeded' when self.state == 'aborted' 'aborted' when errored.any? 'errored' when (passed | failed).count == build_parts.count 'failed' else failed.empty? ? 'running' : 'doomed' end previous_state = self.state update_attributes!(:state => next_state) unless previous_state == next_state [previous_state, next_state] end def update_commit_status! repository.remote_server.update_commit_status!(self) end # As implemented, finished_at will return the wrong value if there is a # unsuccessful attempt following a successful one. Left this way for # performance and simplicity. def finished_at build_attempts.maximum(:finished_at) end def elapsed_time last_finished_at = finished_at return nil if last_finished_at.blank? last_finished_at - created_at end def linear_time build_parts.inject(0) do |sum, part| sum + (part.elapsed_time || 0) end end def retry_count build_parts.sum(0) do |part| part.build_attempts.count - 1 end end def max_retries build_parts.max_by { |part| part.build_attempts.count }.build_attempts.count - 1 end # This can be used as `building_time` under the assumption that # all parts executed in parallel. 
def longest_build_part build_parts.max_by { |part| part.elapsed_time || 0 }.elapsed_time end def idle_time (elapsed_time || 0) - (longest_build_part || 0) end def succeeded? state == 'succeeded' end def failed? FAILED_STATES.include?(state) end # has a build part with failed attempts but no successful ones yet def already_failed? build_parts.any? { |part| part.build_attempts.unsuccessful.exists? && !part.build_attempts.where(state: 'passed').exists? } end def aborted? state == 'aborted' end def promotable? succeeded? && branch_record.convergence? end def mergable_by_kochiku? succeeded? && merge_on_success_enabled? && repository.allows_kochiku_merges? && !newer_branch_build_exists? end def merge_on_success_enabled? !branch_record.convergence? && self.merge_on_success end def newer_branch_build_exists? most_recent_build = branch_record.most_recent_build most_recent_build.id != self.id end def merge_to_master! BuildStrategy.merge_ref(self) end def promote! unless promoted? BuildStrategy.promote_build(self) update!(promoted: true) end end def completed? TERMINAL_STATES.include?(state) end # Changes the build state to 'aborted'. Sets merge_on_success to false to # protect against accidental merges. Updates the state of all of the build's # 'runnable' build_parts to be 'aborted'. def abort! update!(state: 'aborted', merge_on_success: false) BuildAttempt .joins(:build_part) .where(:state => 'runnable', 'build_parts.build_id' => self.id) .update_all(state: 'aborted', updated_at: Time.current) end def to_color case state when 'succeeded' :green when 'failed', 'errored', 'aborted', 'doomed' :red else :blue end end def to_png case to_color when :green status_png(179, 247, 110) when :red status_png(247, 110, 110) when :blue status_png(110, 165, 247) end end def send_build_status_email! return if branch_record.convergence? && !previous_successful_build if completed? if failed? && !build_failure_email_sent? && repository.send_build_failure_email? unless build_failure_email_sent? 
BuildMailer.build_break_email(self).deliver_now update(build_failure_email_sent: true) end elsif succeeded? && !branch_record.convergence? && !build_success_email_sent? && repository.send_build_success_email? BuildMailer.build_success_email(self).deliver_now update(build_success_email_sent: true) end elsif !branch_record.convergence? && repository.email_on_first_failure && already_failed? && repository.send_build_failure_email? unless build_failure_email_sent? # due to race condition, update attribute before sending email update(build_failure_email_sent: true) BuildMailer.build_break_email(self).deliver_now end end end def is_running? IN_PROGRESS_STATES.include?(self.state) end def as_json(options = {}) # exclude test_command by default options[:except] ||= [:test_command] super(options.reverse_merge(methods: :elapsed_time)) end private def enqueue_initiated_by Resque.enqueue(BuildInitiatedByJob, self.id) end def status_png(r, g, b) ChunkyPNG::Canvas.new(13, 13, ChunkyPNG::Color::TRANSPARENT) .circle(6, 6, 5, ChunkyPNG::Color::BLACK, ChunkyPNG::Color.rgb(r, g, b)) end end ================================================ FILE: app/models/build_artifact.rb ================================================ require 'log_file_uploader' class BuildArtifact < ActiveRecord::Base belongs_to :build_attempt, :inverse_of => :build_artifacts, :touch => true mount_uploader :log_file, LogFileUploader skip_callback :commit, :after, :remove_log_file! 
validates :log_file, presence: true scope :stdout_log, -> { where(:log_file => ['stdout.log.gz', 'stdout.log']) } scope :error_txt, -> { where(:log_file => 'error.txt') } def as_json super(except: "log_file").tap do |hash| log_file = {"url" => Rails.application.routes.url_helpers.build_artifact_path(self), "name" => self.log_file.path} hash["build_artifact"]["log_file"] = log_file end end end ================================================ FILE: app/models/build_attempt.rb ================================================ class BuildAttempt < ActiveRecord::Base has_many :build_artifacts, :dependent => :destroy, :inverse_of => :build_attempt belongs_to :build_part, :inverse_of => :build_attempts, :touch => true has_one :build_instance, through: :build_part FAILED_BUILD_STATES = %w[failed errored].freeze COMPLETED_BUILD_STATES = %w[passed aborted] + FAILED_BUILD_STATES IN_PROGRESS_BUILD_STATES = %w[runnable running].freeze STATES = IN_PROGRESS_BUILD_STATES + COMPLETED_BUILD_STATES validates :state, inclusion: { in: STATES } scope :unsuccessful, -> { where(state: FAILED_BUILD_STATES) } def elapsed_time if finished_at && started_at finished_at - started_at elsif started_at Time.current - started_at end end def start!(builder) return false unless update_attributes(:state => 'running', :started_at => Time.current, :builder => builder) build = build_part.build_instance previous_state, new_state = build.update_state_from_parts! if previous_state == new_state # bump build's update_at because update_state_from_parts did not alter the build record build.touch end if previous_state != new_state Rails.logger.info("Build #{build.id} state is now #{build.state}") BuildStateUpdateJob.enqueue(build.id) end true end def finish!(state) return false unless update_attributes(:state => state, :finished_at => Time.current) if should_reattempt? # Will only send email if email_on_first_failure is enabled. build_part.build_instance.send_build_status_email! build_part.rebuild! 
elsif state == 'errored' BuildMailer.error_email(self, error_txt).deliver_now end build = build_part.build_instance previous_state, new_state = build.update_state_from_parts! if previous_state == new_state # bump build's update_at because update_state_from_parts did not alter the build record build.touch end if previous_state != new_state Rails.logger.info("Build #{build.id} state is now #{build.state}") BuildStateUpdateJob.enqueue(build.id) end true end def unsuccessful? FAILED_BUILD_STATES.include?(state) end def successful? state == 'passed' end def aborted? state == 'aborted' end def running? state == 'running' end def stopped? COMPLETED_BUILD_STATES.include?(state) end def errored? state == 'errored' end def should_reattempt? unsuccessful? && build_part.should_reattempt? end def error_txt error_artifact = build_artifacts.error_txt.first error_artifact.log_file.read if error_artifact end def files build_artifacts.as_json end end ================================================ FILE: app/models/build_part.rb ================================================ class BuildPart < ActiveRecord::Base # using 'build_instance' instead of 'build' because AR defines `build` for associations, and it wins belongs_to :build_instance, :class_name => "Build", :foreign_key => "build_id", :inverse_of => :build_parts has_many :build_attempts, :dependent => :destroy, :inverse_of => :build_part validates :kind, :paths, :queue, presence: true serialize :paths, Array serialize :options, Hash def last_attempt build_attempts.last end def create_and_enqueue_new_build_attempt! build_attempt = build_attempts.create!(:state => 'runnable') BuildAttemptJob.enqueue_on(queue.to_s, job_args(build_attempt)) build_instance.touch # invalidate the cache of builds#show build_attempt rescue GitRepo::RefNotFoundError # delete the dud build_attempt and re-raise build_attempt.destroy if build_attempt raise end alias rebuild! create_and_enqueue_new_build_attempt! 
def job_args(build_attempt) repository = build_instance.repository { "build_attempt_id" => build_attempt.id, "build_kind" => kind, "build_ref" => build_instance.ref, "branch" => build_instance.branch_record.name, "test_files" => paths, "repo_name" => "#{repository.name}-cache", # need to pass -cache for now for compatibility with current kochiku-worker "test_command" => build_instance.test_command, "repo_url" => repository.url_for_fetching, "remote_name" => "origin", "timeout" => repository.timeout.minutes, "options" => options, "kochiku_env" => Rails.env, } end def status if successful? 'passed' else last_attempt.try(:state) || 'unknown' end end def successful? build_attempts.any?(&:successful?) end def unsuccessful? !successful? end def running? started_at && !finished_at end def not_finished? !finished_at end def to_color case status when 'passed' :green when 'failed', 'errored', 'aborted' :red else :blue end end def started_at last_attempt.try(:started_at) end def finished_at last_attempt.try(:finished_at) end def elapsed_time if finished_at && started_at finished_at - started_at elsif started_at Time.current - started_at end end def as_json(options = {}) super(options.reverse_merge(methods: :status)) end def should_reattempt? if successful? false elsif (build_attempts.unsuccessful.count - 1) < retry_count true # automatically retry build parts that errored in less than 60 seconds elsif elapsed_time && elapsed_time < 60 && last_attempt.errored? && build_attempts.unsuccessful.count < 5 true else false end end end ================================================ FILE: app/models/repository.rb ================================================ require 'remote_server' # This Repository class should only concern itself with persisting and acting on # Repository records in the database. All non-database operations should go # through the RemoteServer classes. 
class Repository < ActiveRecord::Base
  has_many :branches, :dependent => :destroy
  has_many :convergence_branches, -> { where(convergence: true) }, class_name: "Branch"
  has_many :builds, through: :branches
  has_many :build_parts, through: :builds
  has_many :build_attempts, through: :build_parts

  validates :host, :name, :url, presence: true
  validates :name, uniqueness: { scope: :namespace, message: "^Namespace + Name combination already exists", case_sensitive: false }
  validates :timeout, numericality: { :only_integer => true }
  validates :timeout, inclusion: { in: 0..1440, message: 'The maximum timeout allowed is 1440 minutes' }
  validates :assume_lost_after, numericality: { :only_integer => true }, :allow_nil => true
  validates :url, uniqueness: true, allow_blank: true
  validate :validate_url_against_remote_servers

  # Finds the Repository matching a git URL. The URL is parsed by RemoteServer
  # into namespace / name / candidate hosts, so any known alias of the host
  # will match.
  def self.lookup_by_url(url)
    remote_server = RemoteServer.for_url(url)
    repository_namespace = remote_server.attributes.fetch(:repository_namespace)
    repository_name = remote_server.attributes.fetch(:repository_name)
    repository_host_and_aliases = remote_server.attributes.fetch(:possible_hosts)
    Repository.find_by(host: repository_host_and_aliases, namespace: repository_namespace, name: repository_name)
  end

  # Like lookup_by_url but from already-split host/namespace/name; host aliases
  # come from the configured git server settings.
  def self.lookup(host:, namespace:, name:)
    git_server_settings = Settings.git_server(host)
    Repository.find_by(host: [git_server_settings.host, *git_server_settings.aliases].compact, namespace: namespace, name: name)
  end

  # Setting a URL will extract values for host, namespace, and name. This
  # should not overwrite values for those attributes that were set in the same
  # session.
  def url=(value)
    # this column is deprecated; eventually url will just be a virtual attribute
    self[:url] = value
    # Unparseable or unknown-host URLs are still stored; the validator below
    # reports them as errors.
    return unless RemoteServer.parseable_url?(value)
    return unless RemoteServer.valid_git_host?(value)
    attrs_from_remote_server = RemoteServer.for_url(value).attributes
    self.host = attrs_from_remote_server[:host] unless host_changed?
    self.namespace = attrs_from_remote_server[:repository_namespace] unless namespace_changed?
    self.name = attrs_from_remote_server[:repository_name] unless name_changed?
  end

  # Memoized remote-server adapter for this repository's URL.
  def remote_server
    @remote_server ||= RemoteServer.for_url(url)
  end

  delegate :base_html_url, :base_api_url, :sha_for_branch, :url_for_fetching, :get_branch_url, :open_pull_request_url, to: :remote_server

  # Refs to promote on green builds; on_green_update is a comma-separated list.
  def promotion_refs
    on_green_update.split(",").map(&:strip).reject(&:blank?)
  end

  # GitHub webhook event types Kochiku should subscribe to; 'push' only when
  # CI is enabled for this repository.
  def interested_github_events
    event_types = ['pull_request']
    event_types << 'push' if run_ci
    event_types
  end

  def scm_type
    Settings.git_server(self.url).type
  end

  # Public: attempts to lookup a build for the commit under any of the
  # repository's branches. This is done as an optimization since the contents
  # of the commit are guaranteed to not have changed.
  #
  # Returns: Build AR object or nil
  def build_for_commit(sha)
    Build.joins(:branch_record).find_by(:ref => sha, 'branches.repository_id' => self.id)
  end

  # Public: looks across all of a repository's builds for one with the given
  # SHA. If one does not exist it creates one on the branch given
  #
  # sha - String: git sha of the commit in question
  # branch - String or AR Branch: if an existing build is not found a new one will be created on this branch
  #
  # Returns: Build AR object
  def ensure_build_exists(sha, branch)
    build = build_for_commit(sha)
    # NOTE(review): find-then-create is not atomic; concurrent callers could
    # both miss and create duplicate builds — confirm a DB-level uniqueness
    # guard exists for (branch, ref).
    unless build.present?
      build = branch.builds.create!(ref: sha, state: 'partitioning')
    end
    build
  end

  def name_with_namespace
    "#{namespace}/#{name}"
  end
  alias to_param name_with_namespace

  private

  # Adds validation errors when the url is set but not parseable, or parseable
  # but pointing at a host Kochiku does not know about.
  def validate_url_against_remote_servers
    return unless url.present?
if RemoteServer.parseable_url?(url) unless RemoteServer.valid_git_host?(url) errors.add(:url, "host is not in Kochiku's list of git servers") end else errors.add(:url, 'is not in a format supported by Kochiku') end end end ================================================ FILE: app/models/repository_observer.rb ================================================ class RepositoryObserver < ActiveRecord::Observer observe :repository def after_save(record) record.remote_server.install_post_receive_hook!(record) if setup_hook? end def setup_hook? Rails.env.production? || Rails.env.staging? end end ================================================ FILE: app/uploaders/base_log_file_uploader.rb ================================================ class BaseLogFileUploader < CarrierWave::Uploader::Base storage :file def cache_dir Rails.root.join('tmp', 'uploads') end end ================================================ FILE: app/uploaders/log_file_uploader.rb ================================================ require 'base_log_file_uploader' class LogFileUploader < BaseLogFileUploader def store_dir build_attempt_id = model.build_attempt_id build_part_id = model.build_attempt.build_part_id build_id = model.build_attempt.build_part.build_id # temporary backwards compatibility for old build artifacts created before the deploy on 08/25/2015 if model.build_attempt.created_at < Time.parse("2015-08-25 04:12:46 UTC").utc && (project_id = model.build_attempt.build_part.build_instance.project_id) project_param = ActiveRecord::Base.connection.select_value("select name from projects where id = #{project_id}") return File.join(project_param, "build_#{build_id}", "part_#{build_part_id}", "attempt_#{build_attempt_id}") end repository_param = model.build_attempt.build_part.build_instance.repository.to_param Rails.public_path.join("log_files", repository_param, "build_#{build_id}", "part_#{build_part_id}", "attempt_#{build_attempt_id}") end end ================================================ FILE: 
app/uploaders/on_success_uploader.rb ================================================
require 'base_log_file_uploader'

class OnSuccessUploader < BaseLogFileUploader
  # Directory for a build's on-success script log: public/log_files/<repo>/build_<id>.
  # NOTE(review): sibling LogFileUploader builds the same root via
  # Rails.public_path; this uses Rails.root.join("public", ...) — same path,
  # different construction. Consider unifying.
  def store_dir
    build_id = model.id
    repository_param = model.repository.to_param
    Rails.root.join("public", "log_files", repository_param, "build_#{build_id}")
  end
end

================================================ FILE: app/views/branches/health.html.haml ================================================ = content_for :title, "Health of #{@branch.name} branch of #{@repository.name_with_namespace}" = content_for :favicon do - if @current_build = favicon_link_tag image_path("#{@current_build.to_color}.png"), type: 'image/png' - content_for :header do %ul.links %li= link_to("Settings", repository_edit_path(@repository), class: "info") %li= link_to("Branches", repository_branches_path(@repository)) - cache(@branch) do - rate = error_free_pass_rate(@builds) - avg_rebuilds = average_number_of_rebuilds(@builds) - median_seconds_to_success = median_elapsed_time(@builds) .performance - if median_seconds_to_success %div{title: "Median elapsed time for successful builds on #{@branch.name}"} %span.label Median time: %span.number #{seconds_to_minutes(median_seconds_to_success)} minutes - unless @builds.empty?
%div{title: "Average number of build parts reattempted to get to a successful build", class: rebuild_count_css_class(avg_rebuilds)} %span.number= format("%0.1f", avg_rebuilds) %span.label rebuilds required on average %div{title: "Percentage of builds with all parts succeeding on the first try", class: pass_rate_css_class(rate)} %span.number= rate %span.label pass rate on first try .stats %h2.subheader Build statistics - if @first_built_date %p First build created on #{@first_built_date.strftime("%Y-%m-%d")} %table.build-stats %thead %tr %th %th Total %th Failures %th Pass Rate %tbody %tr %td All Time (#{@days_since_first_build} days) %td= @total_build_count %td= @total_failure_count %td #{@total_pass_rate}% %tr %td Past 30 days %td= @last30_build_count %td= @last30_failure_count %td #{@last30_pass_rate}% %tr %td Past 7 days %td= @last7_build_count %td= @last7_failure_count %td #{@last7_pass_rate}% - if @part_climate.count > 0 %h2.subheader #{@repository.name}/#{@branch.name} part failure stats over #{@builds.count} builds %table.project-part-info %thead %tr %th.right.whisker Rate %th Part Info - @part_climate.sort.reverse_each do |key, parts_with_failures| %tr %td.right #{key[0]}% %td= format_paths(parts_with_failures.first) %tr %td %td - parts_with_failures.each do |part| = link_to repository_build_part_path(@repository, part.build_instance, part) do %span.part-status{class: [part.status, "attempt-#{part.build_attempts.size}"]} - else %h2.subheader #{@repository.name}/#{@branch.name} hasn't failed in #{@builds.count} builds ================================================ FILE: app/views/branches/index.html.haml ================================================ - content_for :header do %ul.links %li= link_to("Repositories", repositories_path) .projects %ul - @convergence_branches.each do |branch| %li.build-info.bold = link_to(branch.name, repository_branch_path(branch.repository, branch)) - @recently_active_branches.each do |branch| %li.build-info = 
link_to(branch.name, repository_branch_path(branch.repository, branch)) ================================================ FILE: app/views/branches/show.html.haml ================================================ = content_for :title, "#{@repository.name} : #{@branch.name}" = content_for :favicon do - if @current_build = favicon_link_tag image_path("#{@current_build.to_color}.png"), type: 'image/png' - content_for :header do %ul.links %li= link_to("Settings", repository_edit_path(@repository), class: "info") %li= link_to("Branches", repository_branches_path(@repository)) = form_for @build, url: request_new_build_repository_branch_path(@repository, @branch) do |f| - if @repository.enabled? = f.submit "Build", class: 'build-button' - else = f.submit "Build", class: 'build-button', disabled: 'true' - cache(@branch) do %div.health.button= link_to('Health', health_repository_branch_path(@repository, @branch, 'count' => 12)) - rate = error_free_pass_rate(@builds) - avg_rebuilds = average_number_of_rebuilds(@builds) - median_seconds_to_success = median_elapsed_time(@builds) .performance - if median_seconds_to_success %div{title: "Median elapsed time for successful builds on #{@branch.name}"} %span.label Median time: %span.number #{seconds_to_minutes(median_seconds_to_success)} minutes - else %div{title: "Median elapsed time for successful builds on #{@branch.name}"} %span.label Never built - unless @builds.empty? 
%div{title: "Average number of build parts reattempted to get to a successful build", class: rebuild_count_css_class(avg_rebuilds)} %span.number= format('%0.1f', avg_rebuilds) %span.label rebuilds required on average %div{title: "Percentage of builds with all parts succeeding on the first try", class: pass_rate_css_class(rate)} %span.number= rate %span.label completion rate on first try %h2.subheader = @repository.namespace + '/' + @repository.name – = @branch.name #plot .select_commit %input{type: "text", id: "build_ref_input", placeholder: "Commit Revision"} %button{onClick: "goto_ref();"} Lookup Commit = content_for :javascript do :javascript $(document).ready(Kochiku.graphBuildTimes("#{escape_javascript(@repository.to_param)}", "#{escape_javascript(@branch.name)}")); $(function() { var whisker = $('th.whisker'); $('table') .on('mouseenter', '.whisker .part-status', function() { whisker.text($(this).data('ref')); }) .on('mouseleave', '.whisker', function() { whisker.text('Previous'); }); }); var goto_ref = function() { window.location.href = "//" + location.host + "/builds/" + $('#build_ref_input').val(); }; - if @current_build %table.project-part-info %thead %tr %th.right.whisker Previous %th.status %code.build-status{class: @current_build.state} = link_to @current_build.ref[0, 5], repository_build_path(@repository, @current_build) - display_ruby_version = multiple_ruby_versions?(@current_build) - build_metadata_headers(@current_build, display_ruby_version).each do |header| %th{class: header.downcase.gsub(/\W+/, '-')}= header %th.type Type %th.right.time Elapsed %th.right.count Attempt - @build_parts.each do |_key, build_parts_by_build| - part = build_parts_by_build.values.first - build_part = build_parts_by_build[@current_build] - cache(part) do %tr{:id => dom_id(part)} %td.right.whisker - total_attempts = 0 - @builds[0..-2].each do |previous_build| - previous_part = build_parts_by_build[previous_build] - if previous_part.present? 
- attempts = previous_part.build_attempts.size - total_attempts += attempts = link_to "/#{@repository.to_param}/builds/#{previous_build.to_param}/parts/#{previous_part.to_param}" do %span.part-status{class: [previous_part.status, "attempt-#{attempts}"], title: pluralize(attempts, 'attempt'), data: { ref: previous_build.ref[0, 5] }} - else %span.part-status.attempt-0 %td - if build_part %span.part-status{class: build_part.status} = link_to build_part.status.to_s.capitalize, "/#{@repository.to_param}/builds/#{@current_build.to_param}/parts/#{build_part.to_param}" - build_metadata_values(@current_build, part, display_ruby_version).each do |value| %td= value %td= part.kind.to_s %td.right.elapsed= build_part.elapsed_time ? duration_strftime(build_part.elapsed_time) : '' if build_part %td.right = build_part.build_attempts.size if build_part - if @current_build = content_for :javascript do :javascript (function() { var startTimes = #{ start_times = {} @current_build.build_parts.each { |part| start_times[dom_id(part)] = part.started_at if part.running? 
} start_times.to_json }; var now = new Date(); for(var partDomId in startTimes) { if(!startTimes.hasOwnProperty(partDomId)) { continue; } var startTime = new Date(Date.parse(startTimes[partDomId])); $('.project-part-info tbody').find('tr#' + partDomId + ' > .elapsed').text( Math.round((now-startTime)/60000) + ":" + ("00" + (Math.round((now-startTime)/1000)%60)).slice(-2)); } })() ================================================ FILE: app/views/branches/show.json.erb ================================================ <% json_data = @branch.attributes json_data['recent_builds'] = @builds %> <%= json_data.to_json.html_safe %> ================================================ FILE: app/views/branches/show.rss.builder ================================================ xml.rss({:version => "2.0"}) do xml.channel do xml.title("Kochiku RSS Feed") xml.link(repository_branch_url(@repository, @branch)) xml.language("en") xml.ttl(10) @builds.each do |build| xml.item do xml.title("Build Number #{build.id} #{build_success_in_words(build)}") xml.pubDate(build.created_at.to_s) xml.guid(repository_build_url(@repository, build)) xml.link(repository_build_url(@repository, build)) end end end end ================================================ FILE: app/views/branches/status_report.xml.builder ================================================ xml.Projects do @branches.each do |branch| # currently cimonitor only utilizes the activity attribute xml.Project({ :name => branch.repository.to_param + (branch.name == 'master' ? '' : ('/' + branch.name)), :activity => build_activity(branch.builds.last), :lastBuildLabel => branch.builds.last.object_id, :webUrl => repository_branch_url(branch.repository, branch), :lastBuildStatus => (branch.last_completed_build.try(:succeeded?) ? 
"Success" : "Failure"), :lastBuildTime => branch.last_completed_build.try(:finished_at).try(:strftime, "%Y-%m-%dT%H:%M:%SZ") }) end end ================================================ FILE: app/views/build_attempts/_build_attempt.html.haml ================================================ .attempt{:class => build_attempt.state} = build_attempt.state ================================================ FILE: app/views/build_attempts/stream_logs.html.haml ================================================ %h2.subheader = @repository.name_with_namespace – = link_to @build.branch_record.name, repository_branch_path(@repository, @build.branch_record) – = link_to repository_build_path(@repository, @build) do %code.build-status{class: @build.state, title: @build.ref} = @build.ref[0, 7] – Part #{@build_part.id} .flash.error{'id' => 'errorMessage', 'style' => "display: none;"} Error streaming logs %label = check_box_tag :refresh, true, true Refresh %div{'id' => 'loadFull', 'style' => 'display: none;'} (skipping n bytes) %br .log_contents{'id' => 'log_content_display'} %img{:src => image_url('loader.gif'), 'id' => "loading_img"} = content_for :javascript do :javascript var currentPos = -1; var finished = false; var refreshIntervalId; var refreshInterval = 5000; var badRequests = 0; var startPos = -1; // autoscroll to the bottom to follow tail of log var scrolled_to_bottom = function() { return ((window.innerHeight + window.scrollY) >= document.body.scrollHeight); } var get_logs_chunk = function() { if($('input#refresh').is(':checked') && !finished) { $.getJSON("#{stream_logs_chunk_path(@build_attempt.id)}?start=" + currentPos, function( data ) { log_entry = data['Contents']; current_value = $('#log_content_display').text(); var scrolled = scrolled_to_bottom(); $('#log_content_display').text(current_value + log_entry); if (scrolled) { window.scrollTo(0,document.body.scrollHeight); } // first request: use it to determine whether any bytes have been skipped if (currentPos == -1) { 
startPos = data['Start']; if (data['Start'] > 0) { $('#loadFull').text("(skipping " + startPos + " bytes)"); $('#loadFull').show(); } } currentPos = data['Start'] + data['BytesRead']; if (data['state'] != 'running' && data['BytesRead'] == 0) { finished = true; $('#loading_img').hide(); } }) .fail(function() { clearInterval(refreshIntervalId); badRequests += 1; if (badRequests < 3) { refreshInterval *= 1.25; refreshIntervalId = window.setInterval(get_logs_chunk, refreshInterval); } else { $('#errorMessage').show(); $('#loading_img').hide(); } }); } }; $('input#refresh').click(function () { if ($('input#refresh').is(':checked')) { if (!finished) { $('#loading_img').show(); } } else { $('#loading_img').hide(); } }); $(document).ready(function() { window.scrollTo(0,document.body.scrollHeight); get_logs_chunk(); refreshIntervalId = window.setInterval(get_logs_chunk, refreshInterval); }); ================================================ FILE: app/views/build_mailer/build_break_email.html.haml ================================================ %html{xmlns: "http://www.w3.org/1999/html"} %head %meta{'content' => 'text/html; charset=UTF-8', 'http-equiv' => 'Content-Type'} %body %h1 #{@build.branch_record.name} build failed for #{@build.repository.name} = link_to('Link to build.', repository_build_url(@build.repository, @build)) - if @pr_link = link_to('Link to PR.', @pr_link) - if @build.build_parts.count > 1 The build was sharded into #{@build.build_parts.count} parts and took #{@build.elapsed_time.to_i / 60} minutes. (#{@build.idle_time.to_i / 60}m idle, #{@build.longest_build_part.to_i / 60}m running.) %br Without sharding the build would have taken #{@build.linear_time.to_i / 60} minutes to run. %br - else The build took #{@build.elapsed_time.to_i / 60} minutes. (#{@build.idle_time.to_i / 60}m idle, #{@build.longest_build_part.to_i / 60}m running.) 
%br %h2 Failed build parts: %ul - @failed_build_parts.each do |failed_build_part| %li = link_to("Part: #{failed_build_part.kind}, number #{failed_build_part.id}", repository_build_part_url(@build.repository, @build, failed_build_part)) = failed_build_part_sentence(failed_build_part) %span.broken-path{style: "font-size: smaller; color: gray;"} - #{failed_build_paths(failed_build_part)} %br - @responsible_email_and_files.each do |email, files| #{email} was emailed because of changes to: %ul - files.each do |file| %li= file %br %h2 Changes #{@build.branch_record.convergence? ? 'since last success' : 'included in build'} - @git_changes.each do |git_change| %b SHA: #{link_to(git_change[:hash], @build.repository.remote_server.href_for_commit(git_change[:hash]))} %br %b Committer: = git_change[:author] %br %b Date: = git_change[:date] %br %pre= git_change[:message] %br %br ================================================ FILE: app/views/build_mailer/build_break_email.text.erb ================================================ <%= @build.branch_record.name %> build failed for <%= @build.repository.name %> <%= repository_build_url(@build.repository, @build) %> <% if @build.build_parts.count > 1 %> The build was sharded into <%= @build.build_parts.count %> parts and took <%= @build.elapsed_time.to_i/60 %> minutes. (<%= @build.idle_time.to_i/60 %>m idle, <%= @build.longest_build_part.to_i/60 %>m running.) Without sharding the build would have taken <%= @build.linear_time.to_i/60 %> minutes to run. <% else %> The build took <%= @build.elapsed_time.to_i/60 %> minutes. (<%= @build.idle_time.to_i/60 %>m idle, <%= @build.longest_build_part.to_i/60 %>m running.) <% end %> <% @responsible_email_and_files.each do |email, files| %> <%= email %> was emailed because of changes to <%= files.join(", ") %> <% end %> -------------------------------------------------------------------------------- <%= "Changes #{@build.branch_record.convergence? ? 
'since last success' : 'included in build'}" %> -------------------------------------------------------------------------------- <% @git_changes.each do |git_change| %> SHA: <%= git_change[:hash] %> Committer: <%= git_change[:author] %> Date: <%= git_change[:date] %> <%= git_change[:message] %> <% end %> ================================================ FILE: app/views/build_mailer/build_success_email.html.haml ================================================ %html{xmlns: "http://www.w3.org/1999/html"} %head %meta{'content' => 'text/html; charset=UTF-8', 'http-equiv' => 'Content-Type'} %body %h1 #{@build.branch_record.name} build succeeded for #{@build.repository.name} = link_to('Link to build.', repository_build_url(@build.repository, @build)) - if @pr_link = link_to('Link to PR.', @pr_link) - if @build.build_parts.count > 1 The build was sharded into #{@build.build_parts.count} parts and took #{@build.elapsed_time.to_i / 60} minutes. (#{@build.idle_time.to_i / 60}m idle, #{@build.longest_build_part.to_i / 60}m running#{@build.retry_count > 0 ? ", requiring #{@build.retry_count} retries.)" : ".)"} %br Without sharding the build would have taken #{(@build.linear_time.to_i / 60) * (1 + @build.max_retries)} minutes to run. %br - if @build.retry_count > 0 (#{@build.linear_time.to_i / 60}m building, requiring #{@build.max_retries} retries.) - else The build took #{@build.elapsed_time.to_i / 60} minutes. (#{@build.idle_time.to_i / 60}m idle, #{@build.longest_build_part.to_i / 60}m running#{@build.retry_count > 0 ? 
", requiring #{@build.retry_count} retries.)" : ".)"} %br %h2 Changes included in build - @git_changes.each do |git_change| %b SHA: #{link_to(git_change[:hash], @build.repository.remote_server.href_for_commit(git_change[:hash]))} %br %b Committer: = git_change[:author] %b Date: = git_change[:date] %pre= git_change[:message] %br %br ================================================ FILE: app/views/build_mailer/build_success_email.text.erb ================================================ <%= @build.branch_record.name %> build succeeded for <%= @build.repository.name %> <%= repository_build_url(@build.repository, @build) %> <% if @build.build_parts.count > 1 %> The build was sharded into <%= @build.build_parts.count %> parts and took <%= @build.elapsed_time.to_i/60 %> minutes. (<%= @build.idle_time.to_i/60 %>m idle, <%= @build.longest_build_part.to_i/60 %>m <% if @build.retry_count > 0 %> running, requiring <%= @build.retry_count %> retries.) <% else %> running.) <% end %> Without sharding the build would have taken <%= (@build.linear_time.to_i/60) * (1 + @build.max_retries) %> minutes to run.
<% if @build.retry_count > 0 %> (<%= @build.linear_time.to_i/60 %>m building, requiring <%= @build.max_retries %> retries.) <% end %> <% else %> The build took <%= @build.elapsed_time.to_i/60 %> minutes. (<%= @build.idle_time.to_i/60 %>m idle, <%= @build.longest_build_part.to_i/60 %>m <% if @build.retry_count > 0 %> running, requiring <%= @build.retry_count %> retries.) <% else %> running.) <% end %> <% end %> -------------------------------------------------------------------------------- Changes included in build -------------------------------------------------------------------------------- <% @git_changes.each do |git_change| %> SHA: <%= git_change[:hash] %> Committer: <%= git_change[:author] %> Date: <%= git_change[:date] %> <%= git_change[:message] %> <% end %> ================================================ FILE: app/views/build_mailer/error_email.html.haml ================================================ %html{xmlns: "http://www.w3.org/1999/html"} %head %meta{'content' => 'text/html; charset=UTF-8', 'http-equiv' => 'Content-Type'} %body %h2 Kochiku error on #{@builder} = repository_build_part_url(@build_part.build_instance.repository, @build_part.build_instance, @build_part) %h2 Error text %pre= @error_text ================================================ FILE: app/views/build_mailer/error_email.text.erb ================================================ Kochiku error on <%= @builder %> for <%= repository_build_part_url(@build_part.build_instance.repository, @build_part.build_instance, @build_part) %> <%= @error_text %> ================================================ FILE: app/views/build_parts/_build_attempts.html.haml ================================================ %tr{id: dom_id(attempt), :"data-id" => index + 1} %td.right= link_to(index + 1, attempt) %td %span.attempt-status{:class => attempt.state}= attempt.state.to_s.capitalize %td.rank= build_attempts_rank[attempt.id.to_s] %td= attempt.started_at %td.right.elapsed= 
duration_strftime(attempt.elapsed_time) %td.right= attempt.builder ? attempt.builder.sub(".#{Settings.domain_name}", '') : "pending" %td - if attempt.running? && attempt.log_streamer_port.present? = link_to("stdout.log (in progress)", stream_logs_path(attempt.id)) - else - attempt.build_artifacts.sort_by { |artifact| artifact.log_file.path }.each do |artifact| = link_to File.basename(artifact.log_file.path), artifact %br %td.wrap - unless attempt.stopped? = link_to("Abandon", finish_build_attempt_path(attempt, :state => 'aborted'), :method => :post) ================================================ FILE: app/views/build_parts/_build_part.html.haml ================================================ .part{:class => build_part.status, :title => build_part.paths.map{|path| "-#{path}"}.join("
")} -# build_part.build_instance.repository is important to prevent n+1 queries here. Using the :through does a bunch of SQL. %a.part-wrapper{:href => repository_build_part_path(build_part.build_instance.repository, build_part.build_instance, build_part)} .kind - case build_part.kind - when "spec" Specs - when "cucumber" Cukes - else = build_part.kind - if build_part.build_attempts.any? .attempts = render build_part.last_attempt - if build_part.build_attempts.size > 1 = "..." ================================================ FILE: app/views/build_parts/show.html.haml ================================================ = content_for :title do = @build.ref[0, 7] – = @repository.name = content_for :favicon do = favicon_link_tag image_path("#{@build_part.to_color}.png"), :type => 'image/png' %h2.subheader = link_to(@build.repository.name_with_namespace, repository_branches_path(@build.repository)) – = link_to(@build.branch_record.name, repository_branch_path(@build.repository, @build.branch_record)) – = link_to repository_build_path(@repository, @build) do %code.build-status{class: @build.state, title: @build.ref} = @build.ref[0, 7] – #{@build_part.kind} (part #{@build_part.id}) .actions %label - if @repository.enabled? = link_to("Rebuild", rebuild_repository_build_part_path(@repository, @build, @build_part), method: :post, class: "rebuild button") %label = check_box_tag :refresh, true, @build_part.not_finished? 
Refresh .build-info.build-info-subheader %span.info %span.status{:class => 'build-part-' + @build_part.status.to_s}= @build_part.status.to_s.capitalize on %span.queue #{@build_part.queue} queue %table.build-part-info %thead %tr %th.right.count Attempt %th.status Status %th.queue-position Position %th Started At %th.right.time Elapsed Time %th.right.worker Worker %th Build Artifacts %th.right.actions Actions %tbody - @build_part.build_attempts.each_with_index do |attempt, index| = render partial: 'build_parts/build_attempts', locals: {attempt: attempt, index: index, build_attempts_rank: @build_attempts_rank} %ol#build-paths - if @build_part.options['total_workers'] && @build_part.options['worker_chunk'] %li Chunk #{@build_part.options['worker_chunk']} of #{@build_part.options['total_workers']} - @build_part.paths.each do |path| %li= path = content_for :javascript do :javascript if ($('.build-part-info tbody tr').length > 0) { $('.build-part-info').tablesorter({ sortList: [ [0, 0] ] }); StartTimes = #{ # rubocop:disable Style/IndentationConsistency start_times = {} @build_part.build_attempts.each_with_index { |attempt, index| start_times[index + 1] = attempt.started_at } start_times.to_json }; } if ( "Notification" in window && Notification.permission == "default") { Notification.requestPermission(); } Kochiku.buildInfo = {table: '.build-part-info tbody', renderTime: Date.parse("#{raw @build_part.updated_at}"), state: "#{@build.state}"}; Kochiku.buildInfo.id = #{@build.id}; Kochiku.buildInfo.branch = "#{@build.branch_record.name}"; Kochiku.buildInfo.repo = "#{@build.repository.name}"; Kochiku.terminalStates = #{raw BuildAttempt::COMPLETED_BUILD_STATES}; Kochiku.doneMessage = "BuildPart on " Kochiku.delayedRefresh(Kochiku.buildInfo); ================================================ FILE: app/views/builds/_build.html.haml ================================================ .build %a.build-wrapper{:href => repository_build_path(build.repository, build)} .build-info .ref= 
build.ref %h3.build-id{:class => build.state}= build.id .times .time-started Started at = time_for(build.created_at, "%m/%d %I:%M%P") .time-elapsed - if build.completed? Built in %strong = duration_strftime(build.elapsed_time, "%Hh %Mm %Ss") .build-state %span.info %span.state{:class => 'build-' + build.state.to_s}= build.state.to_s.capitalize on %span.queue= build.queue.to_s.capitalize - if build.branch_record.convergence? %a.info{:href => show_link_to_compare(build, build.previous_successful_build.try(:ref), build.ref), :title => 'show changes since last green build'} Compare to last green build - elsif build.succeeded? %a.info{:href => show_link_to_create_pull_request(build), :title => 'create a pull request against master'} Create pull request - if build.on_success_script_log_file.present? = link_to File.basename(build.on_success_script_log_file.to_s), build.on_success_script_log_file.url, :class => :info %a.info.last{:href => show_link_to_commit(build.repository, build.ref)} Show HEAD commit .parts= render build.build_parts ================================================ FILE: app/views/builds/_build_parts.html.haml ================================================ - display_ruby_version = multiple_ruby_versions?(build) %tr{:"data-id" => part.id} %td.right= link_to(part.id, repository_build_part_path(build.repository, build, part)) %td %span.part-status{:class => part.status} - text = part.status.to_s.capitalize - if part.status == 'running' && part.last_attempt.log_streamer_port.present? = link_to(text, stream_logs_path(part.last_attempt.id)) - elsif (artifact = part.most_recent_stdout_artifact) = link_to(text, artifact, :title => 'Last completed stdout.log') - else = text - position = build_parts_position[part.id] - if position.present? 
%td{:class => 'queue-position-value'} = position - else %td - build_metadata_values(build, part, display_ruby_version).each do |value| %td= value %td= part.kind.to_s %td.right - builder = part.last_attempt.try(:builder) = builder ? builder.sub(".#{Settings.domain_name}", '') : "pending" %td.right.elapsed= part.elapsed_time ? duration_strftime(part.elapsed_time) : "pending" %td.right= part.build_attempts.size %td.right - if part.unsuccessful? && repository.enabled? = link_to("Rebuild", rebuild_repository_build_part_path(build.repository, build, part), :method => :post) ================================================ FILE: app/views/builds/show.html.haml ================================================ = content_for :title do = @build.ref[0, 7] – = @build.repository.name = content_for :favicon do = favicon_link_tag image_path("#{@build.to_color}.png"), :type => 'image/png' %h2.subheader = @build.repository.name – = link_to(@build.branch_record.name, repository_branch_path(@build.repository, @build.branch_record)) – %code.build-status{class: @build.state, title: @build.ref} = @build.ref[0, 7] %a.info{:href => show_link_to_commit(@build.repository, @build.ref)} Show - if @build.branch_record.convergence? %a.info{:href => show_link_to_compare(@build, @build.previous_successful_build.try(:ref), @build.ref), :title => 'show changes since last green build'} Compare to last green build .actions - if @build.succeeded? = button_to "Sync status to #{@build.repository.scm_type}", resend_status_repository_build_path(@build.repository, @build), :method => :post - if @build.repository.allows_kochiku_merges? %form{action: toggle_merge_on_success_repository_build_path(@build.repository, @build), method: :post} %label = check_box_tag :merge_on_success, true, @build.merge_on_success_enabled?, disabled: !eligible_for_merge_on_success?(@build), onchange: 'this.form.submit()' Merge on Success %label = check_box_tag :refresh, true, @build.is_running? 
Refresh .build-info.build-info-subheader - if @build.succeeded? && !@build.branch_record.convergence? %a.info{:href => show_link_to_create_pull_request(@build), :title => 'create a pull request against master'} Create pull request %span.info Created = timeago(@build.created_at) %span.info Updated = timeago(@build.updated_at, :id => "time-since-update") - if @build.completed? %span.info Built in #{duration_strftime(@build.elapsed_time, "%Hh %Mm %Ss")} - if @build.completed? && @build.failed? && @build.build_parts.present? %span.info #{@build.build_parts.failed.count} out of #{@build.build_parts.count} build parts failed - if @build.is_running? %span.info = button_to "Abort Build", abort_repository_build_path(@build.repository, @build), method: :patch, class: "abort-build" - if @repository.enabled? && (@build.failed? || @build.aborted?) %span.info - if @build.build_parts.empty? || !@build.error_details.empty? = button_to "Retry Partitioning", retry_partitioning_repository_build_path(@build.repository, @build), :method => :post, :form_class => "retry-partitioning" - else = button_to "Rebuild failed parts", rebuild_failed_parts_repository_build_path(@build.repository, @build), :method => :post, :form_class => "rebuild-parts" - if @build.error_details.present? .build-error %h2 Build error %pre= [@build.error_details[:message], @build.error_details[:backtrace]].join("\n") - if @build.succeeded? && @build.build_parts.count == 0 .build-empty %h2 Build Empty %span.info Partitioner did not return any work for this build. 
%table.build-summary#build-summary %thead %tr %th.right.id Part %th.status Status %th.queue-position Position - display_ruby_version = multiple_ruby_versions?(@build) - build_metadata_headers(@build, display_ruby_version).each do |header| %th{class: header.downcase.gsub(/\W+/, '-')}= header %th.type Type %th.right.worker Worker %th.right.time Elapsed Time %th.right.count Attempt %th.right.actions Actions %tbody - @build.build_parts.decorate.each do |part| = render partial: 'builds/build_parts', locals: {part: part, build: @build, build_parts_position: @build_parts_position, repository: @repository} - if @build.on_success_script_log_file.url = link_to(File.basename(@build.on_success_script_log_file.path), @build.on_success_script_log_file.url) = content_for :javascript do :javascript if ($('#build-summary tbody tr').length > 0) { $('#build-summary').tablesorter({ sortList: [ [1, 0] ] }); StartTimes = #{ # rubocop:disable Style/IndentationConsistency start_times = {} @build.build_parts.each { |part| start_times[part.id] = part.started_at } start_times.to_json }; } if ( "Notification" in window && Notification.permission == "default") { Notification.requestPermission(); } Kochiku.buildInfo = {table: '.build-summary tbody', renderTime: Date.parse(#{raw @build.updated_at.to_json}), state: "#{@build.state}"}; Kochiku.buildInfo.id = #{@build.id}; Kochiku.buildInfo.branch = "#{@build.branch_record.name}"; Kochiku.buildInfo.repo = "#{@build.repository.name}"; Kochiku.terminalStates = #{raw Build::TERMINAL_STATES}; Kochiku.delayedRefresh(Kochiku.buildInfo); Kochiku.doneMessage = "Build on " $('abbr.timeago').click(function() { // swap the relative time with the absolute time var originalText = $(this).text(); $(this).text($(this).attr('title')); $(this).attr('title', originalText); }); ================================================ FILE: app/views/dashboards/build_history_by_worker.html.haml ================================================ #worker-health-wrap %table{:class 
=> "worker-health"} %thead %th.right Worker %th Partition Attempts %tbody %tr %td.right= "Partition workers" %td - @partition_jobs.each do |partition_job| = link_to build_redirect_path(partition_job) do - if !partition_job.build_parts.empty? %span.attempt-status{:class => :passed} // ugly hack because in this table, partitioning means "in progress", whereas in // the following build attempts table, it means not started yet - elsif partition_job.state == 'partitioning' %span.attempt-status{:class => :running} - else %span.attempt-status{:class => partition_job.state} %br %table{:class => "worker-health"} %thead %th.right Worker %th Build Attempts %tbody - @workers.each do |worker_name, build_attempts| %tr %td.right= worker_name %td - build_attempts.each do |build_attempt| = link_to build_attempt do %span.attempt-status{:class => build_attempt.state} ================================================ FILE: app/views/layouts/application.html.haml ================================================ !!! %html{:lang => 'en'} %head %meta{:charset => 'utf-8'} %meta{:name => 'google', :value => 'notranslate'} %title - if (title = yield(:title)).present? #{title} – Kochiku = stylesheet_link_tag 'tablesorter.theme.kochiku.css' = stylesheet_link_tag 'tipTip.css', :media => 'screen' = stylesheet_link_tag 'screen.css', :media => 'all' = csrf_meta_tag - if (favicon = yield(:favicon)).present? = favicon - else = favicon_link_tag '/favicon.ico' %body #page .section-wrapper#header .section %a.logo{:href => root_path, :title => "Home"} %h1 %b Kochiku %ruby.translation 構 こうちく .header-right = yield :header - if @repository && !@repository.enabled? .section-wrapper#disabled-repo-alert .section .warn This repository is currently disabled. .section-wrapper#content .section - if flash[:error].present? .flash.error = flash[:error] - if flash[:warn].present? .flash.warn = flash[:warn] - if flash[:message].present? 
.flash.message = flash[:message] = yield #nav .section= link_to("Build taking too long? (Resque Admin)", "/resque") .section= link_to("Worker Health", build_history_by_worker_path(count: 5000)) = javascript_include_tag 'application' :javascript $(document).ready(function() { $('.part').tipTip({ delay: 100, maxWidth: "auto", edgeOffset: 10, fadeIn: 100, fadeOut: 100 }); }); = yield :javascript ================================================ FILE: app/views/merge_mailer/merge_failed.text.erb ================================================ Kochiku build automatically merged build: <%= [@build.repository.to_param, @build.id, @build.branch_record.name].join(", ") %>. stdout & stderr: <%= @stdout_and_stderr %> ================================================ FILE: app/views/merge_mailer/merge_successful.html.erb ================================================
<%# HTML email body sent when Kochiku automatically merges a build's branch (merge-on-success). %>
<%# Locals set by the mailer: @build, @merge_commit (ref of the merge), @stdout_and_stderr (output of the merge commands). %>

Kochiku automatically merged build: <%= @build.repository.to_param %>, <%= @build.id %>, <%= link_to_branch @build %>.

The ref for the merge is <%= link_to_commit(@build.repository, @merge_commit) %>.

stdout & stderr:
<%= @stdout_and_stderr %>
================================================ FILE: app/views/repositories/_form.html.haml ================================================ = form_for @repository, :url => form_url, :html => { :id => 'repository-form' } do |f| = f.error_messages %div %label{:for => "url"} Repository URL: = f.text_field :url, :placeholder => '', :autocapitalize => 'off', :autocorrect => 'off', :spellcheck => 'false' %div %label{:for => "convergence_branches"} Convergence Branches: = text_field_tag :convergence_branches, @current_convergence_branches.join(', '), placeholder: "master", autocapitalize: 'off', autocorrect: 'off', spellcheck: 'false' - if @repository.test_command.present? %div{title: 'Test command should now be specified in the kochiku.yml'} %label{:for => "test_command"} Test Command: (Deprecated) = f.text_field :test_command, :id => 'test_command', :disabled => true %div %label{:for => "timeout"} Timeout a build part after: = f.text_field :timeout, :id => "timeout", :class => "short" minutes %div %label{:for => "assume_lost_after"} Assume that a build has been lost if its still running after: = f.text_field :assume_lost_after, :id => "assume_lost_after", :class => "short" minutes %div %label{:for => "run_ci"} Trigger build on push to master: = f.check_box :run_ci, :id => "run_ci" %div %label{:for => "enabled"} Enable repository: = f.check_box :enabled, :id => "enabled" %fieldset %legend Pull Requests %div %label{:for => "build_pull_requests"} Build pull requests: = f.check_box :build_pull_requests, :id => "build_pull_requests" %div %label{:for => "allows_kochiku_merges"} Allow developers to request branches be merged into master on success: = f.check_box :allows_kochiku_merges, :id => "allows_kochiku_merges" - display_css = @repository.allows_kochiku_merges ? '' : 'display: none' %span{:id => 'branch-delete-warning', :style => display_css} Warning: Kochiku will delete the branch from Git after merging. 
%div %label{:for => "send_merge_successful_email"} Send email on automatic merge success: = f.check_box :send_merge_successful_email, :id => "send_merge_successful_email", :disabled => @repository.allows_kochiku_merges ? nil : true %fieldset %legend On a green build %div %label{:for => "on_green_update"} Update branches to last green commit: = f.text_field :on_green_update, :id => "on_green_update", :placeholder => "Comma separated list of branch names" %div %label{:for => "send_build_success_email"} Send email to contributers: = f.check_box :send_build_success_email, :id => "send_build_success_email" %fieldset %legend On a red build %div %label{:for => "send_build_failure_email"} Send email to build breakers: = f.check_box :send_build_failure_email, :id => "send_build_failure_email" %div %label{:for => "email_on_first_failure"} Email on first build part failure for branch builds: = f.check_box :email_on_first_failure, :id => "email_on_first_failure" = f.submit @repository.new_record? ? "Create" : "Update" - unless @repository.new_record? = button_to "Delete", repository_path(@repository.id), method: :delete, form_class: "delete-form", class: "danger-button", data: {confirm: "This is a permanent destructive action, are you sure?"} = content_for :javascript do :javascript $('#allows_kochiku_merges').change(function(){ $('#branch-delete-warning').toggle($("#allows_kochiku_merges").is(':checked')); $('#send_merge_successful_email').prop("disabled", !$("#allows_kochiku_merges").is(':checked')); }); $('#repository-form').submit(function(event) { // Manually update the hidden element generated by the Rails check_box helper. // // This workaround is neccessary for send_merge_successful_email to // maintain it's value when its input is is 'disabled'. 
$("input[name='repository[send_merge_successful_email]'][type='hidden']").val( $("input[name='repository[send_merge_successful_email]'][type='checkbox']").is(':checked') ); }); ================================================ FILE: app/views/repositories/dashboard.html.haml ================================================ - content_for :header do %ul.links %li= link_to("Repositories", repositories_path) .projects.projects-grid - @branches.each do |branch| - cache(branch) do %div.ci-build-info{:class => "ci-#{branch.most_recent_build_state}"} %div.project-name = link_to(branch.repository.name, repository_branch_path(branch.repository, branch)) - if branch.most_recent_build.try(:is_running?) %div.state{:class => "build-#{branch.most_recent_build_state}"} = branch.most_recent_build_state.to_s.capitalize - if branch.last_completed_build && branch.last_completed_build.finished_at %div.state Last built = timeago(branch.last_completed_build.finished_at) - else %div.state Never built - if branch.last_completed_build %div.state{:class => "build-#{branch.last_completed_build.state}"} = branch.last_completed_build.state.to_s.capitalize = "in " + distance_of_time_in_words(branch.last_build_duration) if branch.last_build_duration %div.project-link = link_to("all branches", repository_branches_path(branch.repository)) ================================================ FILE: app/views/repositories/edit.html.haml ================================================ = render 'form', form_url: repository_url(@repository.id) %br %h3 Web Hooks %p %strong Build SHA %br = link_to build_ref_repository_url(@repository.id, ref: 'master', sha: 'abc123') %br %em.hint POST to this URL from your source control system to trigger a build of the given branch/SHA. 
================================================ FILE: app/views/repositories/index.html.haml ================================================ .repositories .new-repository-link = link_to("Add Repository", new_repository_path) %ul - @repositories.each do |repository| %li.build-info = link_to(repository.url, repository_edit_path(repository)) ================================================ FILE: app/views/repositories/new.html.haml ================================================ = render 'form', form_url: repositories_url ================================================ FILE: bin/bundle ================================================ #!/usr/bin/env ruby ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__) load Gem.bin_path('bundler', 'bundle') ================================================ FILE: bin/rails ================================================ #!/usr/bin/env ruby begin load File.expand_path('../spring', __FILE__) rescue LoadError => e raise unless e.message.include?('spring') end APP_PATH = File.expand_path('../../config/application', __FILE__) require_relative '../config/boot' require 'rails/commands' ================================================ FILE: bin/rake ================================================ #!/usr/bin/env ruby begin load File.expand_path('../spring', __FILE__) rescue LoadError => e raise unless e.message.include?('spring') end require_relative '../config/boot' require 'rake' Rake.application.run ================================================ FILE: bin/setup ================================================ #!/usr/bin/env ruby require 'pathname' # path to your application root. APP_ROOT = Pathname.new File.expand_path('../../', __FILE__) Dir.chdir APP_ROOT do # This script is a starting point to setup your application. 
# Add necessary setup steps to this file: puts "== Installing dependencies ==" system "gem install bundler --conservative" system "bundle check || bundle install" # puts "\n== Copying sample files ==" # unless File.exist?("config/database.yml") # system "cp config/database.yml.sample config/database.yml" # end puts "\n== Preparing database ==" system "bin/rake db:setup" puts "\n== Removing old logs and tempfiles ==" system "rm -f log/*" system "rm -rf tmp/cache" puts "\n== Restarting application server ==" system "touch tmp/restart.txt" end ================================================ FILE: bin/spring ================================================ #!/usr/bin/env ruby # This file loads spring without using Bundler, in order to be fast. # It gets overwritten when you run the `spring binstub` command. unless defined?(Spring) require 'rubygems' require 'bundler' if (match = Bundler.default_lockfile.read.match(/^GEM$.*?^ (?: )*spring \((.*?)\)$.*?^$/m)) Gem.paths = { 'GEM_PATH' => [Bundler.bundle_path.to_s, *Gem.path].uniq.join(Gem.path_separator) } gem 'spring', match[1] require 'spring/binstub' end end ================================================ FILE: config/application.dev.yml ================================================ # Email address to use in the 'from' field for emails sent by Kochiku. sender_email_address: 'kochiku@example.com' # Email address where kochiku should send problems with the build system (for example, errors), # as distinct from failures in a particular test (which go to the people who committed code). kochiku_notifications_email_address: 'kochiku-notifications@example.com' # Domain name to use in constructing generic addresses. For example noreply@example.com in git commits. domain_name: 'example.com' # Set to true if Kochiku is served over https use_https: false # Host name where Kochiku is serving web pages. 
kochiku_host: 'kochiku.example.com' # If you commit with hitch/git-pair, etc, set this in order to send email to each person in the pair. # For example, github+joe+bob@example.com will turn into emails to joe@example.com and bob@example.com # if git_pair_email_prefix is set to 'github'. git_pair_email_prefix: 'github' # Mail server which will accept mail on port 25 (standard SMTP port). If you need to use another port, # or other settings, you currently need to edit the kochiku source (config.action_mailer settings in # config/environments/production.rb). smtp_server: 'localhost' # Host and port to connect to for Redis communication. redis_host: '127.0.0.1' redis_port: 6379 # List your git servers (at least for now, they need to be either github, github enterprise, or # Atlassian Stash for things like constructing URLs to pages on those servers. Would be nice to # just turn off the fancy features for a vanilla git server instead, but that isn't yet possible). # possible values for type are: github or stash git_servers: github.com: type: github # It is highly recommended that you create an OAuth token for Kochiku on # Github. This will allow Kochiku to do many things including display build # status on pull requests. # github.com: # type: github # oauth_token_file: /path/to/github_oauth_token # If you would like Kochiku to clone and fetch repositories from a git mirror # define the repository and fill in the url to your mirror. # git.example.com: # mirror: 'git://git-mirror.example.com/' # If you have multiple domains pointing at your git server then define them as aliases # git.example.com: # aliases: # - alias.example # - git.alias.com # Example of Atlassian Stash integration. 
# stash.example.com: # type: stash # username: kochiku-robot # password_file: config/secrets/kochiku-robot-password ================================================ FILE: config/application.rb ================================================ require File.expand_path('../boot', __FILE__) require 'rails/all' Bundler.require(:default, Rails.env) I18n.enforce_available_locales = true module Kochiku class Application < Rails::Application config.generators do |g| g.template_engine :haml g.test_framework :rspec g.helper false end # Settings in config/environments/* take precedence over those specified here. # Application configuration should go into files in config/initializers # -- all .rb files in that directory are automatically loaded. # Custom directories with classes and modules you want to be autoloadable. # ACHTUNG: intentionally empty to help catch missing requires for config.threadsafe! # config.autoload_paths += %W() # Only load the plugins named here, in the order given (default is alphabetical). # :all can be used as a placeholder for all plugins not explicitly named. # config.plugins = [ :exception_notification, :ssl_requirement, :all ] # Activate observers that should always be running. config.active_record.observers = :repository_observer # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone. # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC. config.time_zone = 'Pacific Time (US & Canada)' # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded. # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s] # config.i18n.default_locale = :de # Configure the default encoding used in templates for Ruby 1.9. config.encoding = "utf-8" # Configure sensitive parameters which will be filtered from the log file. # config.filter_parameters += [:password] # Enable escaping HTML in JSON. 
config.active_support.escape_html_entities_in_json = true # Use SQL instead of Active Record's schema dumper when creating the database. # This is necessary if your schema can't be completely dumped by the schema dumper, # like if you have constraints or database-specific column types # config.active_record.schema_format = :sql # Version of your assets, change this if you want to expire all your assets config.assets.version = '1.0' config.assets.precompile << Proc.new{ |path| !File.basename(path).starts_with?('_') } end end ================================================ FILE: config/application.test.yml ================================================ ######################################################################### # # # This version of the application.yml is used by the Kochiku test suite # # # ######################################################################### # The descriptions for these settings are in config/application.dev.yml sender_email_address: 'kochiku@example.com' kochiku_notifications_email_address: 'kochiku-notifications@example.com' domain_name: 'example.com' use_https: false kochiku_host: 'kochiku.example.com' git_pair_email_prefix: 'github' smtp_server: 'localhost' redis_host: '127.0.0.1' git_servers: github.com: type: github git.example.com: type: github git.squareup.com: type: github stash.example.com: type: stash ================================================ FILE: config/application.yml ================================================ # Place your production Kochiku application config here. # # Start by copying the contents of config/application.dev.yml and modify as # desired. ================================================ FILE: config/boot.rb ================================================ require 'rubygems' # Set up gems listed in the Gemfile. 
ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__) require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE']) ================================================ FILE: config/compass.rb ================================================ # This configuration file works with both the Compass command line tool and within Rails. # Require any additional compass plugins here. project_type = :rails # Set this to the root of your project when deployed: http_path = "/" # You can select your preferred output style here (can be overridden via the command line): # output_style = :expanded or :nested or :compact or :compressed # To enable relative paths to assets via compass helper functions. Uncomment: # relative_assets = true # To disable debugging comments that display the original location of your selectors. Uncomment: # line_comments = false preferred_syntax = :sass ================================================ FILE: config/database.production.yml.sample ================================================ production: adapter: mysql2 encoding: utf8 reconnect: true username: kochiku password: the_password database: kochiku host: localhost ================================================ FILE: config/database.yml ================================================ development: &defaults adapter: mysql2 encoding: utf8 reconnect: false database: kochiku_development pool: 15 username: root password: test: &TEST <<: *defaults database: kochiku_test host: 127.0.0.1 ================================================ FILE: config/deploy/production.rb ================================================ # Default value for default_env is {} # set :default_env, { path: "/opt/ruby/bin:$PATH" } # Server that is running the Kochiku Rails app server 'kochiku.example.com', user: 'kochiku', roles: %w{web app db worker} ================================================ FILE: config/deploy.rb ================================================ # Lock version to protect against cap 
command being called without bundle exec # and executing with another version lock '3.4.0' set :application, "Kochiku" set :repo_url, "https://github.com/square/kochiku.git" set :user, "kochiku" ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp } # Default value for :format is :pretty # set :format, :pretty # Default value for :log_level is :debug # set :log_level, :debug # Default value for :pty is false # set :pty, true set :deploy_to, "/app/#{fetch(:user)}/kochiku" set :deploy_via, :remote_cache set :linked_dirs, %w{log} # Reference Capistrano's flow diagram for help choosing hooks # http://capistranorb.com/documentation/getting-started/flow/ before "deploy:started", "kochiku:setup" after "deploy:symlink:shared", "kochiku:symlinks" before "deploy:updated", "deploy:overwrite_database_yml" # warn if a legacy deploy deploy.custom.rb is in place if File.exist?(File.expand_path('deploy.custom.rb', File.dirname(__FILE__))) warn "Kochiku has upgraded to Capistrano 3. Placing custom capistrano config in deploy.custom.rb is no longer supported. Please move Capistrano settings to config/deploy/production.rb and remove deploy.custom.rb to make this message go away." exit(1) end ================================================ FILE: config/environment.rb ================================================ # Load the rails application require File.expand_path('../application', __FILE__) # Load application settings for Kochiku require File.expand_path('../../lib/settings_accessor', __FILE__) CONF_FILE = if Rails.env.test? File.expand_path('../application.test.yml', __FILE__) elsif Rails.env.development? File.expand_path('../application.dev.yml', __FILE__) else File.expand_path('../application.yml', __FILE__) end raise("#{CONF_FILE} is required to start Kochiku") unless File.exist?(CONF_FILE) Settings = SettingsAccessor.new(File.read(CONF_FILE)) # Disable symbol and yaml parsing in the XML parser to avoid # other code paths being exploited. 
# https://www.ruby-forum.com/attachment/8029/cve-2013-0156-poc.txt ActiveSupport::XmlMini::PARSING.delete("symbol") ActiveSupport::XmlMini::PARSING.delete("yaml") # Initialize the rails application Kochiku::Application.initialize! ================================================ FILE: config/environments/development.rb ================================================ Kochiku::Application.configure do # Settings specified here will take precedence over those in config/application.rb # In the development environment your application's code is reloaded on # every request. This slows down response time but is perfect for development # since you don't have to restart the webserver when you make code changes. config.cache_classes = false config.eager_load = false # Show full error reports config.consider_all_requests_local = true # Enable page, action, and fragment caching # # Important to have enabled in development to keep cache related bugs from # slipping through. config.action_controller.perform_caching = true config.cache_store = :memory_store, { size: 67108864 } # 64.megabytes # Uncomment to use Redis caching in development # # config.cache_store = :readthis_store, { # expires_in: 2.days.to_i, # namespace: 'cache', # marshal: JSON, # redis: { # host: Settings.redis_host, # port: Settings.redis_port, # db: 1, # use different db than Resque # driver: :hiredis # } # } # Don't care if the mailer can't send config.action_mailer.raise_delivery_errors = false # Print deprecation notices to the Rails logger config.active_support.deprecation = :log # Raise an error on page load if there are pending migrations config.active_record.migration_error = :page_load # Debug mode disables concatenation and preprocessing of assets. # This option may cause significant delays in view rendering with a large # number of complex assets. config.assets.debug = true # suppress output of asset requests. 
Formerly handled by quiet_assets gem config.assets.quiet = true # Generate digests for assets URLs # config.assets.digest = false config.sass.preferred_syntax = :sass Rails.application.routes.default_url_options[:host] = "localhost:3000" config.action_mailer.default_url_options = {:host => "localhost:3000"} config.after_initialize do Bullet.enable = true Bullet.bullet_logger = true Bullet.console = true Bullet.rails_logger = true # Added because Branches#show.rss does not use the build_attempts but Branches#show.html does use them Bullet.add_whitelist :type => :unused_eager_loading, :class_name => "BuildPart", :association => :build_attempts end end ================================================ FILE: config/environments/production.rb ================================================ Kochiku::Application.configure do # Settings specified here will take precedence over those in config/application.rb # Code is not reloaded between requests config.cache_classes = true # Eager load code on boot. This eager loads most of Rails and # your application in memory, allowing both thread web servers # and those relying on copy on write to perform better. # Rake tasks automatically ignore this option for performance. 
config.eager_load = true # Full error reports are disabled and caching is turned on config.consider_all_requests_local = true # internal service; safe to show errors config.action_controller.perform_caching = true config.cache_store = :readthis_store, { expires_in: 2.days.to_i, namespace: 'cache', marshal: JSON, redis: { host: Settings.redis_host, port: Settings.redis_port, db: 1, # use different db than Resque driver: :hiredis } } # Disable Rails's static asset server (Apache or nginx will already do this) config.serve_static_files = false # Compress JavaScripts and CSS config.assets.js_compressor = :uglifier # Don't fallback to assets pipeline if a precompiled asset is missed config.assets.compile = false # Generate digests for assets URLs config.assets.digest = true # Specifies the header that your server uses for sending files # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies. 
config.force_ssl = true # See everything in the log (default is :debug) config.log_level = :info # Prepend all log lines with the following tags # config.log_tags = [ :subdomain, :uuid ] # Use a different logger for distributed setups # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new) # Enable serving of images, stylesheets, and javascripts from an asset server # config.action_controller.asset_host = "http://assets.example.com" # Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added) # config.assets.precompile += %w( search.js ) # Disable delivery errors, bad email addresses will be ignored # config.action_mailer.raise_delivery_errors = false # Enable locale fallbacks for I18n (makes lookups for any locale fall back to # the I18n.default_locale when a translation can not be found) config.i18n.fallbacks = true # Send deprecation notices to registered listeners config.active_support.deprecation = :notify # Disable automatic flushing of the log to improve performance. # config.autoflush_log = false # Use default logging formatter so that PID and timestamp are not suppressed. 
config.log_formatter = ::Logger::Formatter.new Rails.application.routes.default_url_options[:host] = Settings.kochiku_host Rails.application.routes.default_url_options[:protocol] = Settings.kochiku_protocol config.action_mailer.default_url_options = {:host => Settings.kochiku_host, :protocol => Settings.kochiku_protocol} config.action_mailer.delivery_method = :smtp config.action_mailer.smtp_settings = { :address => Settings.smtp_server, :port => 25 } end ================================================ FILE: config/environments/staging.rb ================================================ Kochiku::Application.configure do # Settings specified here will take precedence over those in config/application.rb config.cache_classes = true config.eager_load = true config.consider_all_requests_local = true # internal service; safe to show errors config.action_controller.perform_caching = true config.cache_store = :readthis_store, { expires_in: 2.days.to_i, namespace: 'cache', marshal: JSON, redis: { host: Settings.redis_host, port: Settings.redis_port, db: 1, # use different db than Resque driver: :hiredis } } # Disable Rails's static asset server (Apache or nginx will already do this) config.serve_static_files = false # Compress JavaScripts and CSS config.assets.js_compressor = :uglifier # Don't fallback to assets pipeline if a precompiled asset is missed config.assets.compile = false # Generate digests for assets URLs config.assets.digest = true # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies. 
config.force_ssl = true # See everything in the log (default is :info) # config.log_level = :debug # Disable delivery errors, bad email addresses will be ignored # config.action_mailer.raise_delivery_errors = false # Enable locale fallbacks for I18n (makes lookups for any locale fall back to # the I18n.default_locale when a translation can not be found) config.i18n.fallbacks = true # Send deprecation notices to registered listeners config.active_support.deprecation = :notify # Disable automatic flushing of the log to improve performance. # config.autoflush_log = false # Use default logging formatter so that PID and timestamp are not suppressed. config.log_formatter = ::Logger::Formatter.new Rails.application.routes.default_url_options[:host] = Settings.kochiku_host Rails.application.routes.default_url_options[:protocol] = Settings.kochiku_protocol config.action_mailer.default_url_options = {:host => Settings.kochiku_host, :protocol => Settings.kochiku_protocol} config.action_mailer.delivery_method = :smtp config.action_mailer.smtp_settings = { :address => Settings.smtp_server, :port => 25 } end ================================================ FILE: config/environments/test.rb ================================================ Kochiku::Application.configure do # Settings specified here will take precedence over those in config/application.rb # The test environment is used exclusively to run your application's # test suite. You never need to work with it otherwise. Remember that # your test database is "scratch space" for the test suite and is wiped # and recreated between test runs. Don't rely on the data there! config.cache_classes = true # Do not eager load code on boot. This avoids loading your whole application # just for the purpose of running a single test. If you are using a tool that # preloads Rails for running tests, you may have to set it to true. 
config.eager_load = false # Configure static asset server for tests with Cache-Control for performance config.public_file_server.enabled = true config.public_file_server.headers = { 'Cache-Control' => 'public, max-age=3600' } # Show full error reports and disable caching config.consider_all_requests_local = true config.action_controller.perform_caching = false # Raise exceptions instead of rendering exception templates config.action_dispatch.show_exceptions = false # Disable request forgery protection in test environment config.action_controller.allow_forgery_protection = false # Tell Action Mailer not to deliver emails to the real world. # The :test delivery method accumulates sent emails in the # ActionMailer::Base.deliveries array. config.action_mailer.delivery_method = :test # Print deprecation notices to the stderr config.active_support.deprecation = :stderr config.after_initialize do Resque.redis.namespace = "resque:kochiku:test" end Rails.application.routes.default_url_options[:host] = "localhost:3001" config.action_mailer.default_url_options = {:host => "localhost:3000"} end ================================================ FILE: config/initializers/backtrace_silencers.rb ================================================ # Be sure to restart your server when you modify this file. # You can add backtrace silencers for libraries that you're using but don't wish to see in your backtraces. # Rails.backtrace_cleaner.add_silencer { |line| line =~ /my_noisy_library/ } # You can also remove all the silencers if you're trying to debug a problem that might stem from framework code. # Rails.backtrace_cleaner.remove_silencers! 
================================================ FILE: config/initializers/cocaine.rb ================================================
# Route Cocaine (shell command-line wrapper gem) logging through the Rails logger.
Cocaine::CommandLine.logger = Rails.logger
================================================ FILE: config/initializers/inflections.rb ================================================
# Be sure to restart your server when you modify this file.
# Add new inflection rules using the following format
# (all these examples are active by default):
# ActiveSupport::Inflector.inflections do |inflect|
#   inflect.plural /^(ox)$/i, '\1en'
#   inflect.singular /^(ox)en/i, '\1'
#   inflect.irregular 'person', 'people'
#   inflect.uncountable %w( fish sheep )
# end
#
# These inflection rules are supported but not enabled by default:
# ActiveSupport::Inflector.inflections do |inflect|
#   inflect.acronym 'RESTful'
# end
================================================ FILE: config/initializers/load_build_strategy.rb ================================================
# In test and development load the no-op build strategy so no real builds are
# triggered; every other environment loads the production build strategy.
if Rails.env.test? || Rails.env.development?
  require 'build_strategies/no_op_build_strategy'
else
  require 'build_strategies/production_build_strategy'
end
================================================ FILE: config/initializers/mime_types.rb ================================================
# Be sure to restart your server when you modify this file.
# Add new mime types for use in respond_to blocks:
# Mime::Type.register "text/richtext", :rtf
# Mime::Type.register_alias "text/html", :iphone
================================================ FILE: config/initializers/readthis.rb ================================================
if Rails.env.staging? || Rails.env.production?
# Allow Rails to continue serving requests if Redis crashes # https://github.com/sorentwo/readthis#fault-tolerance Readthis.fault_tolerant = true end ================================================ FILE: config/initializers/redis.rb ================================================ REDIS = Redis.new( host: Settings.redis_host, port: Settings.redis_port ) ================================================ FILE: config/initializers/resque.rb ================================================ require 'resque-scheduler' require 'resque-retry' require 'resque/failure/redis' require 'resque-retry/server' Resque.redis = REDIS Resque.redis.namespace = "resque:kochiku" # Necessary to specify the schedule file here for the scheduled jobs to appear # in the resque-web UI Resque.schedule = YAML.load_file('config/resque_schedule.yml') Resque::Failure::MultipleWithRetrySuppression.classes = [Resque::Failure::Redis] Resque::Failure.backend = Resque::Failure::MultipleWithRetrySuppression ================================================ FILE: config/initializers/secret_token.rb ================================================ # Be sure to restart your server when you modify this file. # Your secret key is used for verifying the integrity of signed cookies. # If you change this key, all old signed cookies will become invalid! # Make sure the secret is at least 30 characters and all random, # no regular words or you'll be exposed to dictionary attacks. # Use a random hex since so we don't currently use cookies for anything in # Kochiku Kochiku::Application.config.secret_key_base = SecureRandom.hex(64) ================================================ FILE: config/initializers/session_store.rb ================================================ # Be sure to restart your server when you modify this file. 
Kochiku::Application.config.session_store :cookie_store, :key => '_kochiku_session' # Use the database for sessions instead of the cookie-based default, # which shouldn't be used to store highly confidential information # (create the session table with "rails generate session_migration") # Kochiku::Application.config.session_store :active_record_store ================================================ FILE: config/initializers/wrap_parameters.rb ================================================ # Be sure to restart your server when you modify this file. # # This file contains settings for ActionController::ParamsWrapper which # is enabled by default. # Enable parameter wrapping for JSON. You can disable this by setting :format to an empty array. ActiveSupport.on_load(:action_controller) do wrap_parameters :format => [] end ActiveSupport.on_load(:active_record) do # The new default is false but Kochiku started when it was true self.include_root_in_json = true end ================================================ FILE: config/kochiku.yml ================================================ test_command: 'script/ci' ruby: - 2.4.3 targets: - type: spec glob: spec/**/*_spec.rb workers: 1 ================================================ FILE: config/kochiku.yml.sample ================================================ # By listing ruby versions, all of your tests can be run against multiple versions ruby: - 2.1.2 # You can list additional log files to be uploaded by the workers # the stdout from your build will always be uploaded log_file_globs: - log/test.log - myLogs/*.log # Your test command should be specified here test_command: script/ci # You can create a script to be run inside your repo after a green build on_success_script: script/success # Listing targets helps kochiku shard your build targets: - type: spec glob: spec/**/*_spec.rb workers: 1 # If you have multiple workers, you can set a balance strategy balance: round_robin # is the default strategy # Creating a time
manifest helps kochiku better partition the target time_manifest: config/ci/time_manifest.yml # requires round_robin balance # Listing log files for a target overrides the global log files log_file_globs: - log/*.html ================================================ FILE: config/locales/en.yml ================================================ # Sample localization file for English. Add more files in this directory for other locales. # See https://github.com/svenfuchs/rails-i18n/tree/master/rails%2Flocale for starting points. en: hello: "Hello world" ================================================ FILE: config/resque_schedule.yml ================================================ poll_repositories_for_changes: every: - "10m" - :first_in: "5s" class: "PollRepositoriesJob" queue: low args: description: "Fetches any missed changes from added repositories" enforce_timeouts_on_attempts: every: - "5m" - :first_in: "5m" class: "EnforceTimeoutsJob" queue: low args: description: "Errors any attempts where the workers should have timed out" ================================================ FILE: config/routes.rb ================================================ require 'resque/server' Kochiku::Application.routes.draw do mount Resque::Server.new, :at => '/resque' if Rails.env.development? 
# https://github.com/rails/rails/pull/17896 get '/rails/mailers' => "rails/mailers#index" get '/rails/mailers/*path' => "rails/mailers#preview" end root :to => "repositories#dashboard" get '/_status' => "status#available" # /repositories/1/build-ref?ref=master&sha=abc123 resources :repositories, only: [:index, :create, :new, :update, :destroy] do member do post "build-ref", :action => 'build_ref', :as => 'build_ref' end end match '/XmlStatusReport.aspx', to: "branches#status_report", defaults: {:format => 'xml'}, via: :get match '/worker_health', to: "dashboards#build_history_by_worker", via: :get, as: :build_history_by_worker match 'builds/:id' => "builds#build_redirect", :via => :get, :as => :build_redirect, :id => /\d+/ match 'builds/:id/status' => "builds#build_status", :via => :get, :as => :build_status, :id => /\d+/, :defaults => { :format => 'json' } match 'builds/:ref' => "builds#build_ref_redirect", :via => :get, :as => :build_ref_redirect match '/build_attempts/:build_attempt_id/build_artifacts' => "build_artifacts#create", :via => :post match '/build_attempts/:id/start' => "build_attempts#start", :via => :post match '/build_attempts/:id/finish' => "build_attempts#finish", :via => :post, :as => :finish_build_attempt # left here for backward compatibility in case if anyone uses it. /build_attempts/:id should be used instead. 
match '/build_attempts/:id/build_part' => "build_attempts#show", :via => :get, :as => :build_part_redirect match '/build_attempts/:id/stream_logs' => "build_attempts#stream_logs", :via => :get, :as => :stream_logs match '/build_attempts/:id/stream_logs_chunk' => "build_attempts#stream_logs_chunk", :via => :get, :as => :stream_logs_chunk match '/pull-request-builder' => "pull_requests#build", :via => :post, :as => :pull_request_build get 'badge/*repository_path', to: 'branches#badge' # Redirects for legacy urls get '/projects/:project_id/builds/:build_id', to: redirect('/builds/%{build_id}') resources :build_artifacts, :only => [:show] resources :builds, only: [:create] resources :build_attempts, only: [:show] scope path: "*repository_path", as: 'repository', constraints: { repository_path: /[^\/]+\/[^\/]+/ }, format: false do get 'edit', to: 'repositories#edit' resources :builds, only: [:show] do post 'toggle-merge-on-success', :action => "toggle_merge_on_success", :on => :member, :as => :toggle_merge_on_success patch 'abort', :action => "abort", :on => :member get 'status', :action => "build_status", :on => :member, :defaults => { :format => 'json' } post 'rebuild-failed-parts', :action => "rebuild_failed_parts", :on => :member, :as => :rebuild_failed_parts post 'retry-partitioning', :action => "retry_partitioning", :on => :member, :as => :retry_partitioning get 'modified_time', :action => "modified_time", :on => :member, :defaults => { :format => 'json' } get 'refresh_build_part_info', :action => "refresh_build_part_info", :on => :member, :defaults => { :format => 'json' } post 'resend-status', :action => "resend_status", :on => :member, :defaults => { :format => 'json' } resources :build_parts, as: 'parts', path: 'parts', only: [:show] do post 'rebuild', on: :member get 'modified_time', action: "modified_time", on: :member, defaults: { format: 'json' } get 'refresh_build_part_info', :action => "refresh_build_part_info", :on => :member, :defaults => { :format => 
'json' }
    end
  end

  # override branch id to allow branch name to contain both slashes and dots
  resources :branches, path: "", only: [:index, :show], constraints: { id: /.+/ } do
    member do
      post 'request-new-build', action: "request_new_build"
      get 'build-time-history', action: "build_time_history", defaults: { format: 'json' }
      get 'health', action: 'health'
    end
    get 'status-report', action: "status_report", on: :collection
  end
end
end
================================================ FILE: config.ru ================================================
# This file is used by Rack-based servers to start the application.
require ::File.expand_path('../config/environment', __FILE__)
run Kochiku::Application
================================================ FILE: db/migrate/20110621212000_create_schema.rb ================================================
# Initial schema: builds, build_parts, build_part_results, build_artifacts.
class CreateSchema < ActiveRecord::Migration[5.0]
  def self.up
    create_table :builds do |t|
      t.string :sha
      t.string :state
      t.string :queue
      t.timestamps(null: false)
    end

    create_table :build_parts do |t|
      t.integer :build_id
      t.string :kind
      t.text :paths
      t.timestamps(null: false)
    end

    create_table :build_part_results do |t|
      t.integer :build_part_id
      t.datetime :started_at
      t.datetime :finished_at
      t.string :builder
      t.string :result
      t.timestamps(null: false)
    end

    create_table :build_artifacts do |t|
      t.integer :build_part_result_id
      t.string :type
      t.text :content
      t.timestamps(null: false)
    end
  end

  def self.down
    # Drop in reverse dependency order.
    drop_table :build_artifacts
    drop_table :build_part_results
    drop_table :build_parts
    drop_table :builds
  end
end
================================================ FILE: db/migrate/20110624003418_change_artifact_type_to_name.rb ================================================
# Rename build_artifacts.type to build_artifacts.name.
class ChangeArtifactTypeToName < ActiveRecord::Migration[5.0]
  def self.up
    rename_column :build_artifacts, :type, :name
  end

  def self.down
    rename_column :build_artifacts, :name, :type
  end
end
================================================ FILE:
db/migrate/20110624015709_rename_build_part_result_result_to_state.rb ================================================
# Rename build_part_results.result to build_part_results.state.
class RenameBuildPartResultResultToState < ActiveRecord::Migration[5.0]
  def self.up
    rename_column :build_part_results, :result, :state
  end

  def self.down
    rename_column :build_part_results, :state, :result
  end
end
================================================ FILE: db/migrate/20110708203120_change_build_artifacts_for_carrier_wave.rb ================================================
# Switch artifact storage to CarrierWave: drop the inline content column and
# rename name to log_file (presumably a CarrierWave-managed file reference --
# see app/uploaders).
class ChangeBuildArtifactsForCarrierWave < ActiveRecord::Migration[5.0]
  def self.up
    rename_column :build_artifacts, :name, :log_file
    remove_column :build_artifacts, :content
  end

  def self.down
    add_column :build_artifacts, :content, :text
    rename_column :build_artifacts, :log_file, :name
  end
end
================================================ FILE: db/migrate/20110713175724_rename_build_part_result_to_build_part_run.rb ================================================
# Rename the build_part_results table to build_attempts, plus its FK column.
class RenameBuildPartResultToBuildPartRun < ActiveRecord::Migration[5.0]
  def self.up
    rename_table :build_part_results, :build_attempts
    rename_column :build_artifacts, :build_part_result_id, :build_attempt_id
  end

  def self.down
    rename_column :build_artifacts, :build_attempt_id, :build_part_result_id
    rename_table :build_attempts, :build_part_results
  end
end
================================================ FILE: db/migrate/20110713191536_add_foreign_key_indexes.rb ================================================
# Index the foreign-key columns joining builds -> build_parts ->
# build_attempts -> build_artifacts.
class AddForeignKeyIndexes < ActiveRecord::Migration[5.0]
  def self.up
    add_index :build_parts, :build_id
    add_index :build_attempts, :build_part_id
    add_index :build_artifacts, :build_attempt_id
  end

  def self.down
    remove_index :build_parts, column: :build_id
    remove_index :build_attempts, column: :build_part_id
    remove_index :build_artifacts, column: :build_attempt_id
  end
end
================================================ FILE: db/migrate/20110719204508_create_projects.rb
================================================
# Create the projects table (name + branch), indexed for lookup by both.
class CreateProjects < ActiveRecord::Migration[5.0]
  def self.up
    create_table :projects do |t|
      t.string :name
      t.string :branch
      t.timestamps(null: false)
    end
    add_index :projects, [:name, :branch]
  end

  def self.down
    drop_table :projects
  end
end
================================================ FILE: db/migrate/20110719205413_add_project_id_to_builds.rb ================================================
# Link builds to projects via an indexed project_id FK column.
class AddProjectIdToBuilds < ActiveRecord::Migration[5.0]
  def self.up
    add_column :builds, :project_id, :integer
    add_index :builds, :project_id
  end

  def self.down
    remove_index :builds, column: :project_id
    remove_column :builds, :project_id
  end
end
================================================ FILE: db/migrate/20110721185201_rename_builds_sha_to_ref.rb ================================================
class RenameBuildsShaToRef < ActiveRecord::Migration[5.0]
  def self.up
    rename_column :builds, :sha, :ref
  end

  def self.down
    rename_column :builds, :ref, :sha
  end
end
================================================ FILE: db/migrate/20110801215540_rename_error_state_to_errored.rb ================================================
# Data migration: normalize the state value 'error' to 'errored' in both the
# build_attempts and builds tables.
class RenameErrorStateToErrored < ActiveRecord::Migration[5.0]
  def self.up
    execute("UPDATE build_attempts SET state='errored' WHERE state='error'")
    execute("UPDATE builds SET state='errored' WHERE state='error'")
  end

  def self.down
    execute("UPDATE builds SET state='error' WHERE state='errored'")
    execute("UPDATE build_attempts SET state='error' WHERE state='errored'")
  end
end
================================================ FILE: db/migrate/20120803005242_add_merge_bool_to_build.rb ================================================
class AddMergeBoolToBuild < ActiveRecord::Migration[5.0]
  def change
    add_column :builds, :auto_merge, :boolean
  end
end
================================================ FILE: db/migrate/20120817225343_add_branch_to_build.rb ================================================
class AddBranchToBuild < ActiveRecord::Migration[5.0]
  def change
    add_column :builds, :branch, :string
  end
end
================================================ FILE: db/migrate/20121008211955_create_repositories.rb ================================================
# Create the repositories table and link projects to it.
class CreateRepositories < ActiveRecord::Migration[5.0]
  def change
    create_table :repositories do |t|
      t.string :url
      t.string :test_command
      t.text :options
      t.timestamps(null: false)
    end
    add_index :repositories, :url
    add_column :projects, :repository_id, :integer
    add_index :projects, :repository_id
  end
end
================================================ FILE: db/migrate/20121017173936_add_github_repository_id_to_repository.rb ================================================
class AddGithubRepositoryIdToRepository < ActiveRecord::Migration[5.0]
  def change
    add_column :repositories, :github_post_receive_hook_id, :integer
  end
end
================================================ FILE: db/migrate/20121017182543_fix_repository_schema.rb ================================================
# Add the per-repository feature flags and settings columns.
class FixRepositorySchema < ActiveRecord::Migration[5.0]
  def change
    add_column :repositories, :run_ci, :boolean
    add_column :repositories, :use_branches_on_green, :boolean
    add_column :repositories, :build_pull_requests, :boolean
    add_column :repositories, :on_green_update, :string
    add_column :repositories, :use_spec_and_ci_queues, :boolean
    add_column :repositories, :repo_cache_dir, :string
  end
end
================================================ FILE: db/migrate/20121017184946_remove_options_from_repository.rb ================================================
class RemoveOptionsFromRepository < ActiveRecord::Migration[5.0]
  def change
    remove_column :repositories, :options
  end
end
================================================ FILE: db/migrate/20121017222538_add_target_name_to_builds.rb ================================================
class AddTargetNameToBuilds < ActiveRecord::Migration[5.0]
  def change
    add_column :builds, :target_name, :string
  end
end
================================================ FILE: db/migrate/20121017224003_add_command_flag_to_repositories.rb ================================================
class AddCommandFlagToRepositories < ActiveRecord::Migration[5.0]
  def change
    add_column :repositories, :command_flag, :string
  end
end
================================================ FILE: db/migrate/20121018182435_add_options_to_build_part.rb ================================================
class AddOptionsToBuildPart < ActiveRecord::Migration[5.0]
  def change
    add_column :build_parts, :options, :text
  end
end
================================================ FILE: db/migrate/20121024005715_add_send_build_failure_email_to_repository.rb ================================================
# Failure emails default to on for existing and new repositories.
class AddSendBuildFailureEmailToRepository < ActiveRecord::Migration[5.0]
  def change
    add_column :repositories, :send_build_failure_email, :boolean, :default => true
  end
end
================================================ FILE: db/migrate/20121024164929_record_build_failure_email_sent.rb ================================================
class RecordBuildFailureEmailSent < ActiveRecord::Migration[5.0]
  def change
    add_column :builds, :build_failure_email_sent, :boolean
  end
end
================================================ FILE: db/migrate/20121024210129_add_success_script_to_repositories.rb ================================================
class AddSuccessScriptToRepositories < ActiveRecord::Migration[5.0]
  def change
    add_column :repositories, :on_success_script, :string
    add_column :builds, :promoted, :boolean
  end
end
================================================ FILE: db/migrate/20121024212949_add_on_success_log_file_to_build.rb ================================================
class AddOnSuccessLogFileToBuild < ActiveRecord::Migration[5.0]
  def change
    add_column :builds, :on_success_script_log_file, :string
  end
end
================================================ FILE:
db/migrate/20121030213442_add_queue_to_repository.rb ================================================
class AddQueueToRepository < ActiveRecord::Migration[5.0]
  def change
    add_column :repositories, :queue_override, :string
  end
end
================================================ FILE: db/migrate/20121101220831_add_timeout_to_repository.rb ================================================
# Per-repository build timeout, default 40 (presumably minutes -- confirm
# against the timeout-enforcement jobs).
class AddTimeoutToRepository < ActiveRecord::Migration[5.0]
  def change
    add_column :repositories, :timeout, :integer, :default => 40
  end
end
================================================ FILE: db/migrate/20130226232844_add_index_to_build_ref.rb ================================================
class AddIndexToBuildRef < ActiveRecord::Migration[5.0]
  def change
    add_index :builds, :ref
  end
end
================================================ FILE: db/migrate/20130409144945_add_on_success_note_to_repositories.rb ================================================
class AddOnSuccessNoteToRepositories < ActiveRecord::Migration[5.0]
  def change
    add_column :repositories, :on_success_note, :string
  end
end
================================================ FILE: db/migrate/20130511012855_add_deployable_map_to_build.rb ================================================
class AddDeployableMapToBuild < ActiveRecord::Migration[5.0]
  def change
    add_column :builds, :deployable_map, :text
  end
end
================================================ FILE: db/migrate/20130626183046_add_maven_modules_to_build.rb ================================================
class AddMavenModulesToBuild < ActiveRecord::Migration[5.0]
  def change
    add_column :builds, :maven_modules, :text
  end
end
================================================ FILE: db/migrate/20130627194433_add_index_to_build_part_paths.rb ================================================
# Prefix index limited to 255 chars since paths is a TEXT column.
# NOTE(review): the :length option is MySQL-specific -- confirm the database.
class AddIndexToBuildPartPaths < ActiveRecord::Migration[5.0]
  def change
    add_index :build_parts, :paths, :length => {:paths => 255}
  end
end
================================================ FILE: db/migrate/20130709123456_add_upload_artifacts_to_build_parts.rb ================================================ class AddUploadArtifactsToBuildParts < ActiveRecord::Migration[5.0] def change add_column :build_parts, :upload_artifacts, :boolean end end ================================================ FILE: db/migrate/20130822191419_add_queue_to_build_part.rb ================================================ class AddQueueToBuildPart < ActiveRecord::Migration[5.0] def up add_column :build_parts, :queue, :string execute("UPDATE build_parts,builds SET build_parts.queue = builds.queue WHERE builds.id = build_parts.build_id") remove_column :builds, :queue remove_column :repositories, :use_spec_and_ci_queues end def down add_column :repositories, :use_spec_and_ci_queues, :boolean add_column :builds, :queue, :string remove_column :build_parts, :queue end end ================================================ FILE: db/migrate/20130822231850_remove_upload_artifacts_from_build_parts.rb ================================================ class RemoveUploadArtifactsFromBuildParts < ActiveRecord::Migration[5.0] def up remove_column :build_parts, :upload_artifacts end def down add_column :build_parts, :upload_artifacts, :boolean end end ================================================ FILE: db/migrate/20130823210844_add_retry_count_to_build_part.rb ================================================ class AddRetryCountToBuildPart < ActiveRecord::Migration[5.0] def change add_column :build_parts, :retry_count, :integer, default: 0 end end ================================================ FILE: db/migrate/20130823231854_remove_java_specific_stuff.rb ================================================ class RemoveJavaSpecificStuff < ActiveRecord::Migration[5.1] def up remove_column :builds, :deployable_map remove_column :builds, :maven_modules end def down add_column :builds, :deployable_map, :text add_column :builds, :maven_modules, :text 
end end ================================================ FILE: db/migrate/20130823234546_remove_queue_override_from_repositories.rb ================================================ class RemoveQueueOverrideFromRepositories < ActiveRecord::Migration[5.0] def up remove_column :repositories, :queue_override end def down add_column :repositories, :queue_override, :string end end ================================================ FILE: db/migrate/20130910190203_add_repository_name_as_column.rb ================================================ class AddRepositoryNameAsColumn < ActiveRecord::Migration[5.0] Rails.logger = Logger.new(STDOUT) URL_PARSERS = { "git@" => /@(.*):(.*)\/(.*)\.git/, "git:" => /:\/\/(.*)\/(.*)\/(.*)\.git/, "http" => /https?:\/\/(.*)\/(.*)\/([^.]*)\.?/, 'ssh:' => %r{ssh://git@(.*):(\d+)/(.*)/([^.]+)\.git} }.freeze class Repository < ActiveRecord::Base end def project_params(url) # TODO: Use the parsers in the RemoteServer classes. parser = URL_PARSERS[url.slice(0,4)] match = url.match(parser) if match.length > 4 { host: match[1], port: match[2].to_i, username: match[3], repository: match[4] } else { host: match[1], username: match[2], repository: match[3] } end end def old_style_repository_name(url) project_params(url)[:repository] end def up add_column :repositories, :repository_name, :string Repository.all.each do |repository| repository.update_attribute(:repository_name, old_style_repository_name(repository.url)) end repository_count = Repository.all.each_with_object({}) do |repository, duplicates| duplicates[repository.repository_name] ||= 0 duplicates[repository.repository_name] += 1 duplicates end duplicates = repository_count.select { |name, count| count > 1 } if duplicates.any? Rails.logger.warn("") Rails.logger.warn("") Rails.logger.warn(("*" * 80)) Rails.logger.warn("Duplicate repositories detected.") end duplicates.each do |name, count| Rails.logger.warn("Found #{count} repositories named #{name}. 
Please rename them.") end end def down remove_column :repositories, :repository_name end end ================================================ FILE: db/migrate/20131217022000_add_error_text_to_build.rb ================================================ class AddErrorTextToBuild < ActiveRecord::Migration[5.0] def change add_column :builds, :error_details, :text end end ================================================ FILE: db/migrate/20140123234208_add_allows_kochiku_merges_to_repository.rb ================================================ class AddAllowsKochikuMergesToRepository < ActiveRecord::Migration[5.0] def change add_column :repositories, :allows_kochiku_merges, :boolean, default: true end end ================================================ FILE: db/migrate/20140128180258_rename_auto_merge_on_build.rb ================================================ class RenameAutoMergeOnBuild < ActiveRecord::Migration[5.0] def change rename_column :builds, :auto_merge, :merge_on_success end end ================================================ FILE: db/migrate/20140415001051_remove_use_branches_on_green_from_repositories.rb ================================================ class RemoveUseBranchesOnGreenFromRepositories < ActiveRecord::Migration[5.0] def change remove_column :repositories, :use_branches_on_green, :boolean end end ================================================ FILE: db/migrate/20140415011144_remove_command_flag_from_repositories.rb ================================================ class RemoveCommandFlagFromRepositories < ActiveRecord::Migration[5.0] def change remove_column :repositories, :command_flag, :string remove_column :builds, :target_name, :string end end ================================================ FILE: db/migrate/20140506012721_unique_index_on_builds_ref.rb ================================================ class UniqueIndexOnBuildsRef < ActiveRecord::Migration[5.0] def up remove_index :builds, column: :ref # set length to 40 characters and add not 
null constraint change_column :builds, :ref, :string, { limit: 40, null: false } add_index :builds, [:ref, :project_id], :unique => true end def down remove_index :builds, column: [:ref, :project_id] change_column :builds, :ref, :string add_index :builds, :ref end end ================================================ FILE: db/migrate/20140507184819_add_host_and_namespace_to_repositories.rb ================================================ class AddHostAndNamespaceToRepositories < ActiveRecord::Migration[5.0] def up rename_column :repositories, :repository_name, :name change_column :repositories, :name, :string, null: false # add not null constraint add_column :repositories, :host, :string, null: false add_column :repositories, :namespace, :string, null: true # generic git servers will not have a namespace add_index :repositories, [:host, :namespace, :name], name: 'index_repositories_on_host_and_namespace_and_name', unique: true Repository.all.each do |repository| attributes = RemoteServer.for_url(repository.url).attributes repository.update_attributes!( :host => attributes.fetch(:host), :namespace => attributes.fetch(:repository_namespace) ) end end def down remove_index :repositories, name: 'index_repositories_on_host_and_namespace_and_name' remove_columns :repositories, :namespace, :host change_column :repositories, :name, :string, null: true rename_column :repositories, :name, :repository_name end end ================================================ FILE: db/migrate/20140617214701_add_success_email.rb ================================================ class AddSuccessEmail < ActiveRecord::Migration[5.0] def change add_column :builds, :build_success_email_sent, :boolean, :default => false, :null => false add_column :repositories, :send_build_success_email, :boolean, :default => true, :null => false reversible do |dir| change_table :builds do |t| dir.up do execute 'UPDATE builds SET build_failure_email_sent = 0 WHERE build_failure_email_sent IS NULL' t.change 
:build_failure_email_sent, :boolean, :default => false, :null => false end dir.down { t.change :build_failure_email_sent, :boolean, :default => nil, :null => true } end change_table :repositories do |t| dir.up do execute 'UPDATE repositories SET send_build_failure_email = 1 WHERE send_build_failure_email IS NULL' t.change :send_build_failure_email, :boolean, :default => true, :null => false end dir.down { t.change :send_build_failure_email, :boolean, :default => true, :null => true } end end end end ================================================ FILE: db/migrate/20140715225910_remove_notes.rb ================================================ class RemoveNotes < ActiveRecord::Migration[5.0] def change remove_column :repositories, :on_success_note, :string end end ================================================ FILE: db/migrate/20141031234747_add_email_first_failure_to_repositories.rb ================================================ class AddEmailFirstFailureToRepositories < ActiveRecord::Migration[5.0] def change add_column :repositories, :email_on_first_failure, :boolean, default: false, null: false end end ================================================ FILE: db/migrate/20150324001246_remove_on_success_script_from_repositories.rb ================================================ class RemoveOnSuccessScriptFromRepositories < ActiveRecord::Migration[5.0] def up # Guard against deleting any data rows_with_old_data = select_value("select count(*) from repositories where on_success_script IS NOT NULL AND on_success_script != ''") if rows_with_old_data > 0 err_message = <<-ERR_MESSAGE "Found #{rows_with_old_data} rows in the Repositories table with non-empty values" "for `on_success_script`." "Kochiku no longer supports on_success_script inside of the repository table." "The new location is inside of each project's kochiku.yml file." "Please remove the data from the on_success_script column and re-run this migration." 
ERR_MESSAGE Rails.logger.error(err_message) exit(1) end remove_column :repositories, :on_success_script, :string end def down add_column :repositories, :on_success_script, :string end end ================================================ FILE: db/migrate/20150331160909_add_send_merge_successful_email.rb ================================================ class AddSendMergeSuccessfulEmail < ActiveRecord::Migration[5.0] def change add_column :repositories, :send_merge_successful_email, :boolean, default: true, null: false end end ================================================ FILE: db/migrate/20150714234635_add_log_port_to_build_attempt.rb ================================================ class AddLogPortToBuildAttempt < ActiveRecord::Migration[5.0] def change add_column :build_attempts, :log_streamer_port, :integer end end ================================================ FILE: db/migrate/20150717214656_create_branches.rb ================================================ class CreateBranches < ActiveRecord::Migration[5.0] def change create_table :branches do |t| t.references :repository, null: false t.string :name, null: false t.boolean :convergence, null: false, default: false t.timestamps null: false t.index([:repository_id, :name], unique: true) t.index(:convergence) end change_table :builds do |t| t.references :branch, index: true t.index([:ref, :branch_id], unique: true) end end end ================================================ FILE: db/migrate/20150717220149_assign_builds_to_branches.rb ================================================ # In this migration intentionally go out of our way to not use the Project # model so that it can be removed from the codebase. 
class AssignBuildsToBranches < ActiveRecord::Migration[5.0] def up # Be mindful of build.branch versus build#branch_id # `build.branch` is a reference to the name of the branch as a string Build.where(branch_id: nil).find_each do |build| repository_id = connection.select_value("SELECT repository_id FROM projects WHERE id = #{build.project_id}") if repository_id.nil? Rails.logger.error "skipping Build #{build.id} because its project or repository not longer exists" next end branch_record = if build.branch.present? Branch.find_or_create_by(repository_id: repository_id, name: build.branch) else Branch.find_or_create_by(repository_id: repository_id, name: "unknown") end build.update_column(:branch_id, branch_record.id) end # Automatically set all master branches as convergence branches to maintain # previous behavior. Branch.where(name: 'master').update_all(convergence: true) # the 'branch' column is removed from builds in the next migration end def down end end ================================================ FILE: db/migrate/20150717231250_remove_branch_string_from_builds.rb ================================================ class RemoveBranchStringFromBuilds < ActiveRecord::Migration[5.0] def change # The previous migration (AssignBuildsToBranches) mapped branch_id on # builds to the newly introduced Branch records. With that complete it is # safe to remove the legacy branch (string) column from builds. 
remove_column :builds, :branch, :string end end ================================================ FILE: db/migrate/20150719130110_index_repositories_namespace_and_name.rb ================================================ class IndexRepositoriesNamespaceAndName < ActiveRecord::Migration[5.0] def change add_index :repositories, [:namespace, :name], unique: true end end ================================================ FILE: db/migrate/20151111080255_remove_repo_cache_dir_from_repositories.rb ================================================ class RemoveRepoCacheDirFromRepositories < ActiveRecord::Migration[5.0] def change remove_column :repositories, :repo_cache_dir, :string end end ================================================ FILE: db/migrate/20151114185514_fix_convergence_index.rb ================================================ # In order for the index on convergence col to be useful it needs to be # namespaced by repository_id class FixConvergenceIndex < ActiveRecord::Migration[5.0] def change # Add the new index first to avoid killing performance add_index :branches, [:repository_id, :convergence] remove_index :branches, column: :convergence end end ================================================ FILE: db/migrate/20160408214135_index_created_at_on_build_attempts.rb ================================================ class IndexCreatedAtOnBuildAttempts < ActiveRecord::Migration[5.0] def change add_index :build_attempts, :created_at end end ================================================ FILE: db/migrate/20170804214538_add_enabled_bool_to_repositories.rb ================================================ class AddEnabledBoolToRepositories < ActiveRecord::Migration[5.0] def change add_column :repositories, :enabled, :boolean, default: true, null: false end end ================================================ FILE: db/migrate/20180208202524_add_test_command_to_builds.rb ================================================ class AddTestCommandToBuilds < 
ActiveRecord::Migration[5.0] def change add_column :builds, :test_command, :string end end ================================================ FILE: db/migrate/20180220185338_add_assume_lost_after_to_repository.rb ================================================ class AddAssumeLostAfterToRepository < ActiveRecord::Migration[5.0] def change add_column :repositories, :assume_lost_after, :integer end end ================================================ FILE: db/migrate/20180227222254_add_initiated_by_to_builds.rb ================================================ class AddInitiatedByToBuilds < ActiveRecord::Migration[5.0] def change add_column :builds, :initiated_by, :string end end ================================================ FILE: db/migrate/20180301221320_add_instance_type_to_build_attempts.rb ================================================ class AddInstanceTypeToBuildAttempts < ActiveRecord::Migration[5.0] def change add_column :build_attempts, :instance_type, :string end end ================================================ FILE: db/migrate/20180619210823_add_kochiku_yml_config_to_builds.rb ================================================ class AddKochikuYmlConfigToBuilds < ActiveRecord::Migration[5.1] def change add_column :builds, :kochiku_yml_config, :text end end ================================================ FILE: db/schema.rb ================================================ # This file is auto-generated from the current state of the database. Instead # of editing this file, please use the migrations feature of Active Record to # incrementally modify your database, and then regenerate this schema definition. # # Note that this schema.rb definition is the authoritative source for your # database schema. If you need to create the application database on another # system, you should be using db:schema:load, not running all the migrations # from scratch. 
The latter is a flawed and unsustainable approach (the more migrations # you'll amass, the slower it'll run and the greater likelihood for issues). # # It's strongly recommended that you check this file into your version control system. ActiveRecord::Schema.define(version: 20180619210823) do create_table "branches", id: :integer, force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.integer "repository_id", null: false t.string "name", null: false t.boolean "convergence", default: false, null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false t.index ["repository_id", "convergence"], name: "index_branches_on_repository_id_and_convergence" t.index ["repository_id", "name"], name: "index_branches_on_repository_id_and_name", unique: true t.index ["repository_id"], name: "index_branches_on_repository_id" end create_table "build_artifacts", id: :integer, force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.integer "build_attempt_id" t.string "log_file" t.datetime "created_at", null: false t.datetime "updated_at", null: false t.index ["build_attempt_id"], name: "index_build_artifacts_on_build_attempt_id" end create_table "build_attempts", id: :integer, force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.integer "build_part_id" t.datetime "started_at" t.datetime "finished_at" t.string "builder" t.string "state" t.datetime "created_at", null: false t.datetime "updated_at", null: false t.integer "log_streamer_port" t.string "instance_type" t.index ["build_part_id"], name: "index_build_attempts_on_build_part_id" t.index ["created_at"], name: "index_build_attempts_on_created_at" end create_table "build_parts", id: :integer, force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.integer "build_id" t.string "kind" t.text "paths" t.datetime "created_at", null: false t.datetime "updated_at", null: false t.text "options" t.string "queue" t.integer "retry_count", default: 0 
t.index ["build_id"], name: "index_build_parts_on_build_id" t.index ["paths"], name: "index_build_parts_on_paths", length: { paths: 255 } end create_table "builds", id: :integer, force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.string "ref", limit: 40, null: false t.string "state" t.datetime "created_at", null: false t.datetime "updated_at", null: false t.integer "project_id" t.boolean "merge_on_success" t.boolean "build_failure_email_sent", default: false, null: false t.boolean "promoted" t.string "on_success_script_log_file" t.text "error_details" t.boolean "build_success_email_sent", default: false, null: false t.integer "branch_id" t.string "test_command" t.string "initiated_by" t.text "kochiku_yml_config" t.index ["branch_id"], name: "index_builds_on_branch_id" t.index ["project_id"], name: "index_builds_on_project_id" t.index ["ref", "branch_id"], name: "index_builds_on_ref_and_branch_id", unique: true t.index ["ref", "project_id"], name: "index_builds_on_ref_and_project_id", unique: true end create_table "projects", id: :integer, force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.string "name" t.string "branch" t.datetime "created_at", null: false t.datetime "updated_at", null: false t.integer "repository_id" t.index ["name", "branch"], name: "index_projects_on_name_and_branch" t.index ["repository_id"], name: "index_projects_on_repository_id" end create_table "repositories", id: :integer, force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.string "url" t.string "test_command" t.datetime "created_at", null: false t.datetime "updated_at", null: false t.integer "github_post_receive_hook_id" t.boolean "run_ci" t.boolean "build_pull_requests" t.string "on_green_update" t.boolean "send_build_failure_email", default: true, null: false t.integer "timeout", default: 40 t.string "name", null: false t.boolean "allows_kochiku_merges", default: true t.string "host", null: false t.string "namespace" 
t.boolean "send_build_success_email", default: true, null: false t.boolean "email_on_first_failure", default: false, null: false t.boolean "send_merge_successful_email", default: true, null: false t.boolean "enabled", default: true, null: false t.integer "assume_lost_after" t.index ["host", "namespace", "name"], name: "index_repositories_on_host_and_namespace_and_name", unique: true t.index ["namespace", "name"], name: "index_repositories_on_namespace_and_name", unique: true t.index ["url"], name: "index_repositories_on_url" end end ================================================ FILE: db/seeds.rb ================================================ # Eagerly load all of the models to avoid errors related to multiple threads Dir[Rails.root.join("app/models/*.rb")].each { |f| require f } repo_infos = [ { :name => 'kandan', :enabled => true, :location => "git@github.com:kandanapp/kandan.git", :build_attempt_state => 'passed', :types => [:spec, :cucumber, :rubocop, :lint, :unit] }, { :name => 'copycopter-server', :enabled => true, :build_attempt_state => 'passed', :location => "git@github.com:copycopter/copycopter-server.git", :types => [:spec] }, { :name => 'lobsters', :enabled => false, :build_attempt_state => 'errored', :location => "git@github.com:jcs/lobsters.git", :types => [:junit] } ] @builders = %w/ builder01.local builder02.local / def artifact_directory Rails.root.join('tmp') end def write_the_sample_stdout_log_file FileUtils.mkdir_p(artifact_directory) name = artifact_directory.join('stdout.log') File.open(name, 'w') do |file| 75.times { |i| file.puts "Line #{i}" } end name end def sample_stdout_log_file @sample_file ||= artifact_directory.join('stdout.log') end def create_build_artifact(attempt) BuildArtifact.create!( log_file: sample_stdout_log_file.open, build_attempt: attempt ) end def create_build_part(build, kind, paths, build_attempt_state) bp = BuildPart.create!(:build_instance => build, :kind => kind, :paths => paths, :queue => 'ci') 
build_attempt_state ||= (BuildAttempt::STATES + ['passed'] * 5).sample finished = if BuildAttempt::IN_PROGRESS_BUILD_STATES.include?(build_attempt_state) nil else rand(7200).seconds.from_now end attempt = BuildAttempt.create!( :build_part => bp, :builder => @builders.sample, :state => build_attempt_state, :started_at => Time.current, :finished_at => finished ) create_build_artifact(attempt) bp end def create_build(branch, test_types, build_attempt_state: 'passed') build = Build.create!(:branch_record => branch, :ref => SecureRandom.hex(20), :initiated_by => 'test@email.com', :state => 'runnable') Array(test_types).each do |kind| paths = %w( spec/controllers/admin/users_controller_spec.rb spec/jobs/merchant_location_update_job_spec.rb spec/models/loyalty/payer_spec.rb spec/views/mailers/application_mailer/interval_sales_report.text.plain.erb_spec.rb ) 10.times do create_build_part(build, kind, paths, build_attempt_state) end end build.update_state_from_parts! end def populate_builds_for(branch, repo_info) thread_list = [] 10.times do thread_list << Thread.new do ActiveRecord::Base.connection_pool.with_connection do create_build(branch, repo_info[:types], build_attempt_state: repo_info[:build_attempt_state]) end end end thread_list.each { |t| t.join } end write_the_sample_stdout_log_file repos = {} repo_infos.each do |repo_info| repository = Repository.create!({:url => repo_info[:location], :test_command => "script/ci", :run_ci => true, :enabled => repo_info[:enabled]}) repos[repo_info[:name]] = repository master_branch = Branch.create!(:name => 'master', :convergence => true, :repository => repository) populate_builds_for(master_branch, repo_info) %w(feature-branch feature-branch2 feature-branch3).each do |b| developer_branch = Branch.create!(:name => b, :convergence => false, :repository => repository) populate_builds_for(developer_branch, repo_info) end end # create an extra running build for copycopter-server to show something that is in progress 
copycopter_branch = Branch.where(repository: repos['copycopter-server'], name: 'master').first create_build(copycopter_branch, %w(spec), build_attempt_state: 'running') ================================================ FILE: lib/build_strategies/no_op_build_strategy.rb ================================================ class BuildStrategy class << self def promote_build(build) end def merge_ref(ref) end def run_success_script(build) end end end ================================================ FILE: lib/build_strategies/production_build_strategy.rb ================================================ require 'git_blame' class BuildStrategy class << self # The primary function of promote_build is to update the branches specified # in on_green_update field of Repository. # # A feature of promote_build is that it will not cause the promotion ref to # move backwards. For instance, if build 1 finishes after build 2, we don't # cause the promotion ref to move backwards by overwriting promotion_ref # with build 1 # # promote_build does use a force push in order to overwrite experimental # branches that may have been manually placed on the promotion ref by a # developer for testing. def promote_build(build) GitRepo.inside_repo(build.repository) do build.repository.promotion_refs.each do |promotion_ref| unless GitRepo.included_in_promotion_ref?(build.ref, promotion_ref) update_branch(promotion_ref, build.ref) end end end end def run_success_script(build) GitRepo.inside_copy(build.repository, build.ref) do # stderr is redirected to stdout so that all output is captured in the log command = Cocaine::CommandLine.new(on_success_command(build), "2>&1", expected_outcodes: 0..255) output = command.run output += "\nExited with status: #{command.exit_status}" script_log = FilelessIO.new(output) script_log.original_filename = "on_success_script.log" build.on_success_script_log_file = script_log build.save! 
end end def merge_ref(build) # If only Stash is used this could be just inside_repo GitRepo.inside_copy(build.repository, "master") do begin emails = GitBlame.emails_in_branch(build) merger = build.repository.remote_server.merge_executor.new(build) merge_info = merger.merge_and_push if build.repository.send_merge_successful_email? MergeMailer.merge_successful(build, merge_info[:merge_commit], emails, merge_info[:log_output]).deliver_now end merger.delete_branch rescue GitMergeExecutor::GitMergeFailedError => ex MergeMailer.merge_failed(build, emails, ex.message).deliver_now end end end def update_branch(branch_name, ref_to_promote) Cocaine::CommandLine.new("git push", "--force origin #{ref_to_promote}:refs/heads/#{branch_name}").run end def on_success_command(build) git_commit = build.ref git_branch = build.branch_record.name "GIT_BRANCH=#{git_branch} GIT_COMMIT=#{git_commit} #{build.on_success_script}" end end end ================================================ FILE: lib/capistrano/tasks/deploy.cap ================================================ # Users may choose to: # # A) edit the deploy tasks directly inside this file # B) create another .cap file and define your deploy:restart task inside # # Option B is recommended because it will make merging in upstream Kochiku # changes easy. 
namespace :deploy do # task :start do # on roles(:worker) do # end # on roles(:app) do # end # end # task :stop do # on roles(:worker) do # end # on roles(:app) do # end # end # task :restart do # on roles(:worker) do # # Necessary step to restart the Resque workers specific to your # # deployment # end # on roles(:app) do # # Example restart step for a Phusion Passenger deployment # execute :touch, "#{current_release}/tmp/restart.txt" # end # end task :overwrite_database_yml do on roles(:app) do |host| execute :mv, "#{release_path}/config/database.production.yml", "#{release_path}/config/database.yml" end end end # vi: filetype=ruby ================================================ FILE: lib/capistrano/tasks/kochiku.cap ================================================ namespace :kochiku do task :setup do on roles([:app, :worker]) do SSHKit.config.command_map.prefix[:gem].pop #pop off 'bundle exec' execute :gem, "install", "bundler", "--conservative", "-v", "1.3" execute "mkdir -p #{shared_path}/build-partition #{shared_path}/log_files" end end task :symlinks do on roles([:app, :worker]) do execute :mkdir, release_path.join('tmp') execute :ln, '-nfFs', shared_path.join('build-partition'), release_path.join('tmp/build-partition') execute :ln, '-nfFs', shared_path.join('log_files'), release_path.join('public/log_files') execute :ln, '-nfFs', shared_path.join('secrets'), release_path.join('config/secrets') end end end # vi: filetype=ruby ================================================ FILE: lib/fileless_io.rb ================================================ require 'stringio' class FilelessIO < StringIO attr_accessor :original_filename end ================================================ FILE: lib/git_blame.rb ================================================ require 'cocaine' require 'git_repo' class GitBlame class << self def emails_since_last_green(build) lookup_git_names_and_emails(git_names_and_emails_since_last_green(build)) end def emails_in_branch(build) 
lookup_git_names_and_emails(git_names_and_emails_in_branch(build)) end def last_email_in_branch(build) lookup_git_names_and_emails(last_git_name_and_email_in_branch(build)) end def changes_since_last_green(build) output = GitRepo.inside_repo(build.repository) do # TODO: Push this down into GitRepo and integration test it. Cocaine::CommandLine.new("git log --cc --format='::!::%H|%cn <%ce>|%cd|%B::!::' '#{build.previous_successful_build.try(:ref)}...#{build.ref}'").run end parse_git_changes(output) end def changes_in_branch(build) output = GitRepo.inside_repo(build.repository) do # TODO: Push this down into GitRepo and integration test it. Cocaine::CommandLine.new("git log --cc --format='::!::%H|%cn <%ce>|%cd|%B::!::' 'master..#{build.branch_record.name}'").run end parse_git_changes(output) end def files_changed_since_last_build(build, fetch_emails: false, sync: true) output = GitRepo.inside_repo(build.repository, sync: sync) do Cocaine::CommandLine.new("git log --cc --format='::!::%an:%ae::!::' --name-only '#{build.previous_build.try(:ref)}...#{build.ref}'").run end parse_git_files_changes(output, fetch_emails: fetch_emails) end def files_changed_since_last_green(build, fetch_emails: false) output = GitRepo.inside_repo(build.repository) do # TODO: Push this down into GitRepo and integration test it. Cocaine::CommandLine.new("git log --cc --format='::!::%cn:%ce::!::' --name-only '#{build.previous_successful_build.try(:ref)}...#{build.ref}'").run end parse_git_files_changes(output, fetch_emails: fetch_emails) end def files_changed_in_branch(build, fetch_emails: false, sync: true) output = GitRepo.inside_repo(build.repository, sync: sync) do # TODO: Push this down into GitRepo and integration test it. 
Cocaine::CommandLine.new("git log --cc --format='::!::%cn:%ce::!::' --name-only 'master..#{build.branch_record.name}'").run end parse_git_files_changes(output, fetch_emails: fetch_emails) end # net_files_changed_in_branch counts only files which have a net diff in the branch. If a branch includes a commit # to modify a file, and then a revert commit, that file will not be included in this list. def net_files_changed_in_branch(build, sync: true) # get revision of shared ancestor of master and build branch, i.e. the commit at which point this branch was created common_ancestor = GitRepo.inside_repo(build.repository, sync: sync) do Cocaine::CommandLine.new("git merge-base master #{build.branch_record.name}").run end output = GitRepo.inside_repo(build.repository, sync: sync) do Cocaine::CommandLine.new("git diff --name-status --find-renames --find-copies '#{common_ancestor.strip}..#{build.branch_record.name}'").run end parse_git_changes_by_name_status(output) end private def email_from_git_email(email) if email =~ /^#{Settings.git_pair_email_prefix}\+/ localpart, domain = email.split('@') usernames = localpart.strip.split('+') usernames[1..-1].map { |username| "#{username}@#{domain}" } else email end end def git_names_and_emails_since_last_green(build) GitRepo.inside_repo(build.repository) do Cocaine::CommandLine.new("git log --format='%cn:%ce' '#{build.previous_successful_build.try(:ref)}...#{build.ref}'").run.split("\n") end end def git_names_and_emails_in_branch(build) GitRepo.inside_repo(build.repository) do if GitRepo.branch_exist?(build.branch_record.name) Cocaine::CommandLine.new("git log --format='%cn:%ce' 'master..#{build.branch_record.name}'").run.split("\n") else [] end end end def last_git_name_and_email_in_branch(build) GitRepo.inside_repo(build.repository) do Cocaine::CommandLine.new("git log --format='%cn:%ce' -1 '#{build.branch_record.name}'").run.strip end end def lookup_git_names_and_emails(git_names_and_emails) Array(git_names_and_emails).map do 
|git_name_and_email| _name, email = git_name_and_email.split(":") Array(email_from_git_email(email)) end.flatten.compact.uniq end def parse_git_changes(output) output.split("::!::").each_with_object([]) do |line, git_changes| commit_hash, author, commit_date, commit_message = line.chomp.split("|") next if commit_hash.nil? || commit_message.nil? git_changes << {:hash => commit_hash, :author => author, :date => commit_date, :message => commit_message.tr("\n", " ")} end end def parse_git_files_changes(output, fetch_emails: false) email_addresses = [] output.split("\n").each_with_object([]) do |line, file_changes| next if line.empty? if line.start_with?("::!::") email_addresses = [] if fetch_emails line.split("::!::").each do |line_part| next if line_part.nil? || line_part.empty? _name, email = line_part.split(":") email_addresses = email_addresses + Array(email_from_git_email(email)) end email_addresses.compact! end else file_changes << {:file => line, :emails => email_addresses} end end end def parse_git_changes_by_name_status(output) output.split("\n").each_with_object([]) do |line, file_changes| next if line.empty? # Format from --name-status prints a status code, then any file names after that. # For example, a file rename that has 85% similarity would be output as: # R085 path/to/original_name.java path/to/new_name.java # We can safely reject the first element (status code) in the split array. files_in_line = line.split[1..-1] files_in_line.each { |file| file_changes << {file: file, emails: []} } end end end end ================================================ FILE: lib/git_merge_executor.rb ================================================ require 'open3' class GitMergeExecutor class GitFetchFailedError < StandardError; end class GitMergeFailedError < StandardError; end class GitPushFailedError < StandardError; end def initialize(build) if build.branch_record.convergence? 
raise "attempted to merge #{build.branch_record.name} which is a convergence branch and is ineligible for merge by Kochiku" end @build = build end # Public: Merges the branch associated with a build into the master branch # and pushes the result to remote git repo. If the merge is unsuccessful for # any reason the merge is aborted and an exception is raised. def merge_and_push Rails.logger.info("Trying to merge branch: #{@build.branch_record.name} to master after build id: #{@build.id}") begin git_fetch_and_reset merge_commit_sha, merge_log = merge_to_master push_log = push_to_remote rescue GitFetchFailedError, GitPushFailedError tries = (tries || 0) + 1 if tries < 3 sleep(10 * tries) retry else raise end end { merge_commit: merge_commit_sha, log_output: [merge_log, push_log].join("\n") } end def delete_branch begin git_fetch_and_reset delete_log, status = Open3.capture2e("git push --porcelain --delete origin #{@build.branch_record.name}") unless status.success? Rails.logger.warn("Deletion of branch #{@build.branch_record.name} failed") Rails.logger.warn(delete_log) end rescue GitFetchFailedError Rails.logger.warn("Deletion of branch #{@build.branch_record.name} failed") end end private def git_fetch_and_reset checkout_log, status = Open3.capture2e("git fetch && git checkout master && git reset --hard origin/master") unless status.success? raise_and_log(GitFetchFailedError, "Error occurred while reseting to origin/master:", checkout_log) end end def merge_to_master commit_message = "Kochiku merge of branch #{@build.branch_record.name} for build id: #{@build.id} ref: #{@build.ref}" merge_log, status = Open3.capture2e(merge_env, "git merge --no-ff -m '#{commit_message}' #{@build.ref}") unless status.success? 
Open3.capture2e("git merge --abort")
    raise_and_log(GitMergeFailedError, "Was unable to merge your branch:", merge_log)
  end

  # Resolve the sha produced by the merge so callers can report / push it.
  newest_sha, _status = Open3.capture2e("git rev-parse master")
  [newest_sha.chomp, merge_log]
end

# Pushes local master (which now contains the merge commit) to origin.
# Raises GitPushFailedError carrying the captured porcelain output on failure.
def push_to_remote
  push_log, status = Open3.capture2e("git push --porcelain origin master")
  unless status.success?
    raise_and_log(GitPushFailedError, "git push of branch #{@build.branch_record.name} failed:", push_log)
  end
  push_log
end

# Logs the combined message at error level, then raises error_class with it.
def raise_and_log(error_class, error_info, command_output)
  message = "#{error_info}\n\n#{command_output}"
  Rails.logger.error(message)
  raise(error_class, message)
end

# Git environment overrides so merge commits are attributed to the Kochiku
# merger identity instead of the invoking user.
def merge_env
  author_name = "kochiku-merger"
  author_email = "noreply+kochiku-merger@#{Settings.domain_name}"
  {"GIT_AUTHOR_NAME" => author_name, "GIT_COMMITTER_NAME" => author_name,
   "GIT_AUTHOR_EMAIL" => author_email, "GIT_COMMITTER_EMAIL" => author_email}
end
end


================================================
FILE: lib/git_repo.rb
================================================
require 'cocaine'
require 'fileutils'

# Manages Kochiku's local bare mirrors of the repositories it builds and
# provides helpers for running code inside a checkout of a given ref.
class GitRepo
  class RefNotFoundError < StandardError; end

  WORKING_DIR = Rails.root.join('tmp', 'build-partition')

  class << self
    # Clones the cached bare repo into a temp dir, checks out `sha`, and
    # yields the directory path. The temp dir is removed when the block
    # returns. Raises RefNotFoundError when the sha is not in the repo.
    def inside_copy(repository, sha)
      cached_repo_path = cached_repo_for(repository)
      synchronize_cache_repo(cached_repo_path)

      Dir.mktmpdir(nil, WORKING_DIR) do |dir|
        # Clone from the local cache for speed; pushes still go to the real remote.
        Cocaine::CommandLine.new("git clone", "--config remote.origin.pushurl=#{repository.url} #{cached_repo_path} #{dir}").run
        Dir.chdir(dir) do
          # NOTE(review): `sha` is interpolated into a shell command here;
          # assumed to always come from trusted git data -- confirm no caller
          # ever passes user-controlled input.
          raise RefNotFoundError, "repo:#{repository.url}, sha:#{sha}" unless system("git rev-list --quiet -n1 #{sha}")
          # :commit is a Cocaine placeholder; Cocaine shell-escapes the value.
          Cocaine::CommandLine.new("git checkout", "--quiet :commit").run(commit: sha)
          yield dir
        end
      end
    end

    # Yields from inside the cached bare repository, fetching first unless
    # sync is false.
    def inside_repo(repository, sync: true)
      cached_repo_path = cached_repo_for(repository)

      Dir.chdir(cached_repo_path) do
        synchronize_with_remote('origin') if sync
        yield
      end
    end

    # Returns the parsed kochiku.yml committed at `ref`, or nil if none.
    # Raises RefNotFoundError when `ref` is not present in the cached repo.
    def load_kochiku_yml(repository, ref)
      inside_repo(repository) do
        raise RefNotFoundError, "repo:#{repository.url}, sha:#{ref}" unless system("git rev-list --quiet -n1 #{ref}")
        read_repo_config(ref)
      end
    end

    # True when build_ref is an ancestor of promotion_ref.
    def included_in_promotion_ref?(build_ref, promotion_ref)
      # --is-ancestor was added in git 1.8.0
      # exit -> 1: not an ancestor
      # exit -> 128: the commit does not exist
      ancestor_cmd = Cocaine::CommandLine.new("git merge-base", "--is-ancestor #{build_ref} #{promotion_ref}", :expected_outcodes => [0, 1, 128])
      ancestor_cmd.run
      ancestor_cmd.exit_status == 0
    end

    # True when `branch` resolves to an object in the current repository.
    def branch_exist?(branch)
      exist_cmd = Cocaine::CommandLine.new("git rev-parse", "--verify --quiet #{branch}", expected_outcodes: [0, 1])
      exist_cmd.run
      exist_cmd.exit_status == 0
    end

    private

    # Candidate locations for a repo's Kochiku config, in priority order.
    KOCHIKU_YML_LOCS = [
      'kochiku.yml',
      'config/kochiku.yml',
      'config/ci/kochiku.yml',
    ].freeze

    # Returns the parsed config from the first KOCHIKU_YML_LOCS entry that
    # exists at `ref` (via `git show ref:path`), or nil when none exists.
    def read_repo_config(ref)
      command = Cocaine::CommandLine.new("git show", ":ref::file", { :swallow_stderr => true, :expected_outcodes => [0, 128] })
      KOCHIKU_YML_LOCS.each do |loc|
        file = command.run(:ref => ref, :file => loc)
        # SECURITY NOTE(review): YAML.load on repository-controlled content can
        # instantiate arbitrary Ruby objects; consider YAML.safe_load if the
        # config never needs aliases or custom classes.
        return YAML.load(file) if command.exit_status == 0
      end
      nil
    end

    # Returns the path of the bare cache for `repository`, creating it on
    # first use and fixing up the remote url if the repository moved.
    def cached_repo_for(repository)
      cached_repo_path = WORKING_DIR.join(repository.namespace, "#{repository.name}.git")

      if !cached_repo_path.directory?
        FileUtils.mkdir_p(WORKING_DIR.join(repository.namespace))
        clone_bare_repo(repository, cached_repo_path)
      else
        harmonize_remote_url(cached_repo_path, repository.url_for_fetching)
      end

      cached_repo_path
    end

    # Update the remote url for the git repository if it has changed
    def harmonize_remote_url(cached_repo_path, expected_url)
      Dir.chdir(cached_repo_path) do
        remote_url = Cocaine::CommandLine.new("git config", "--get remote.origin.url").run.chomp
        if remote_url != expected_url
          Rails.logger.info "#{remote_url.inspect} does not match #{expected_url.inspect}. Updating it."
          Cocaine::CommandLine.new("git remote", "set-url origin #{expected_url}").run
        end
      end
      nil
    end

    def synchronize_cache_repo(cached_repo_path)
      Dir.chdir(cached_repo_path) do
        # update the cached repo
        synchronize_with_remote('origin')
      end
    end

    # Creates the bare mirror, fetching branch heads but no tags.
    def clone_bare_repo(repo, cached_repo_path)
      # Note: the --config option was added in git 1.7.7
      Cocaine::CommandLine.new(
        "git clone",
        "--bare --quiet --config remote.origin.pushurl=#{repo.url} --config remote.origin.fetch='+refs/heads/*:refs/heads/*' --config remote.origin.tagopt='--no-tags' #{repo.url_for_fetching} #{cached_repo_path}"
      ).run
    end

    # Fetches from remote `name`, retrying up to two more times with an
    # increasing sleep, since a concurrent fetch into the same cache dir
    # makes git exit non-zero.
    def synchronize_with_remote(name)
      Cocaine::CommandLine.new("git fetch", "--quiet --prune #{name}").run
    rescue Cocaine::ExitStatusError => e
      # likely caused by another 'git fetch' that is currently in progress. Wait a few seconds and try again
      tries = (tries || 0) + 1
      if tries < 3
        Rails.logger.warn(e)
        sleep(15 * tries)
        retry
      else
        raise e
      end
    end
  end
end


================================================
FILE: lib/github_commit_status.rb
================================================
require 'github_request'

# Pushes Kochiku build state to the GitHub commit-status API for a build's ref.
class GithubCommitStatus
  def initialize(build, oauth_token)
    @oauth_token = oauth_token
    @url = "#{build.repository.base_api_url}/statuses/#{build.ref}"
    @build = build
    @build_url = Rails.application.routes.url_helpers.repository_build_url(build.repository, build)
  end

  # Maps the build's lifecycle state onto GitHub's success/failure/pending.
  def update_commit_status!
    if @build.succeeded?
      mark_as("success", "Build passed!")
    elsif @build.failed? || @build.aborted?
mark_as("failure", "Build failed") else mark_as("pending", "Build is running") end end private def mark_as(state, description) GithubRequest.post(@url, {:state => state, :target_url => @build_url, :description => description}, @oauth_token) end end ================================================ FILE: lib/github_post_receive_hook.rb ================================================ # frozen_string_literal: true require 'github_request' class GithubPostReceiveHook SUBSCRIBE_NAME = "web" def initialize(repository, oauth_token) @repository = repository @oauth_token = oauth_token @root_url = "#{repository.base_api_url}/hooks" @hook_url = "#{repository.base_api_url}/hooks/#{repository.github_post_receive_hook_id}" @receive_url = Rails.application.routes.url_helpers.pull_request_build_url @interested_events = @repository.interested_github_events @subscribe_args = {:name => "web", :config => {:url => @receive_url}, :events => @interested_events, :active => true} end def subscribe! if @repository.github_post_receive_hook_id update_repository_hook! else synchronize_or_create! end end private def update_repository_hook! begin GithubRequest.patch(@hook_url, @subscribe_args, @oauth_token) rescue GithubRequest::ResponseError => e if e.response.class == Net::HTTPNotFound create_hook else raise e end end end def synchronize_or_create! 
begin response_body = GithubRequest.get(@root_url, @oauth_token) existing_hooks = JSON.parse(response_body) existing_subscription = existing_hooks.detect do |hook| hook["active"] && hook["events"] == @interested_events && hook["config"]["url"] == @receive_url end if existing_subscription @repository.update_attributes(:github_post_receive_hook_id => existing_subscription["id"]) return response_body end rescue GithubRequest::ResponseError Rails.logger.info("Failed to get hooks for #{@root_url}") end create_hook end def create_hook GithubRequest.post(@root_url, @subscribe_args, @oauth_token) end end ================================================ FILE: lib/github_request.rb ================================================ require 'uri' require 'net/http' class GithubRequest class ResponseError < RuntimeError attr_accessor :response end def self.get(url, oauth_token) uri = URI(url) request = Net::HTTP::Get.new(uri.request_uri) request["Authorization"] = "token #{oauth_token}" request["Accept"] = "application/vnd.github.v3+json" make_request(uri, request) end def self.post(url, data, oauth_token) uri = URI(url) request = Net::HTTP::Post.new(uri.request_uri) request.body = data.to_json request["Authorization"] = "token #{oauth_token}" request["Accept"] = "application/vnd.github.v3+json" request["Content-Type"] = "application/json; charset=utf-8" make_request(uri, request) end def self.patch(url, data, oauth_token) uri = URI(url) request = Net::HTTP::Patch.new(uri.request_uri) request.body = data.to_json request["Authorization"] = "token #{oauth_token}" request["Accept"] = "application/vnd.github.v3+json" request["Content-Type"] = "application/json; charset=utf-8" make_request(uri, request) end def self.make_request(uri, request_object) Rails.logger.info("Github request: #{request_object.method}, #{uri}") body = nil Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http| response = http.request(request_object) body = response.body 
Rails.logger.info("Github response: #{response.inspect}")
      Rails.logger.info("Github response body: #{body.inspect}")
      unless response.is_a? Net::HTTPSuccess
        response_error = ResponseError.new("response: #{response.class} body: #{body}")
        response_error.response = response
        raise response_error
      end
    end
    body
  end
  private_class_method :make_request
end


================================================
FILE: lib/partitioner/base.rb
================================================
module Partitioner
  # Fallback partitioner used when a repository has no kochiku.yml: emits a
  # single catch-all partition.
  class Base
    def initialize(build, kochiku_yml)
      @build = build
      @kochiku_yml = kochiku_yml
    end

    # One partition that builds everything. Convergence branches go to the
    # 'ci' queue, feature branches to 'developer'.
    def partitions
      [
        {
          'type' => 'test',
          'files' => ['no-manifest'],
          'queue' => @build.branch_record.convergence? ? 'ci' : 'developer',
          'retry_count' => 0
        }
      ]
    end

    # Subclasses may map failures back to commit authors; none by default.
    def emails_for_commits_causing_failures
      {}
    end

    # Short class name used for logging, e.g. "Maven".
    def partitioner_type
      self.class.name.gsub(/^Partitioner::/, '')
    end
  end
end


================================================
FILE: lib/partitioner/default.rb
================================================
require 'partitioner/base'

module Partitioner
  # This is the original partitioner behavior, which is somewhat ruby targeted
  class Default < Base
    def partitions
      GitRepo.inside_copy(@build.repository, @build.ref) do
        # Handle old kochiku.yml
        if @kochiku_yml.is_a?(Array)
          @kochiku_yml.map { |subset| partitions_for(subset) }.flatten
        else
          build_partitions
        end
      end
    end

    private

    # Longest single recorded test time across all targets' time manifests;
    # nil when no target has a time manifest.
    def max_build_time
      if @kochiku_yml.is_a?(Array)
        @kochiku_yml
      else
        @kochiku_yml.fetch('targets')
      end.map do |subset|
        file_to_times_hash = load_manifest(subset['time_manifest'])
        file_to_times_hash.values if file_to_times_hash.is_a?(Hash)
      end.flatten.compact.max
    end

    # Expands the 'ruby' key (if present) into one set of targets per version.
    def build_partitions
      if @kochiku_yml['ruby']
        @kochiku_yml['ruby'].flat_map do |ruby|
          build_targets(ruby)
        end
      else
        build_targets
      end
    end

    # Builds partitions for every configured target, threading the shared
    # options (ruby version, log globs) into each.
    def build_targets(ruby_version = nil)
      options = {}
      options['ruby'] = ruby_version if ruby_version
      options['log_file_globs'] = Array(@kochiku_yml['log_file_globs']) if @kochiku_yml['log_file_globs']
      @kochiku_yml['targets'].flat_map do |subset|
        partitions_for(
          subset.merge('options' => options.clone)
        )
      end
    end

    # Splits a target's test files into groups of files ("parts"), one group
    # per worker, honoring the target's balance strategy and time manifest.
    def get_file_parts_for(subset)
      glob = subset.fetch('glob', '/dev/null')
      manifest = subset['manifest']
      workers = subset.fetch('workers', 1)
      strategy = subset.fetch('balance', 'round_robin')
      strategy = 'round_robin' unless Strategies.respond_to?(strategy) # override if specified strategy is invalid

      files = Array(load_manifest(manifest)) | Dir[*glob]

      file_to_times_hash = load_manifest(subset['time_manifest'])

      balanced_partitions = if file_to_times_hash.is_a?(Hash)
        @max_time ||= max_build_time
        time_greedy_partitions_for(file_to_times_hash, files, workers)
      else
        []
      end

      # Files with timing data were already placed greedily; spread the rest
      # with the configured strategy.
      files -= balanced_partitions.flatten
      Strategies.send(strategy, files, workers) + balanced_partitions
    end

    # Builds the partition hashes for one target from its file parts.
    def partitions_for(subset)
      type = subset.fetch('type', 'test')
      retry_count = subset['retry_count'] || 0
      if subset['log_file_globs']
        subset['options']['log_file_globs'] = Array(subset['log_file_globs'])
      end

      queue = @build.branch_record.convergence? ? "ci" : "developer"
      queue_override = subset.fetch('queue_override', nil)
      queue = "#{queue}-#{queue_override}" if queue_override.present?

      # An explicit 'target' bypasses file globbing entirely.
      subset_part_files = subset['target'] ? [Array(subset['target'])] : get_file_parts_for(subset)

      subset_part_files.map do |part_files|
        {'type' => type, 'files' => part_files.compact, 'queue' => queue, 'retry_count' => retry_count, 'options' => subset['options']}
      end.select { |p| p['files'].present? }
    end

    # Balance tests by putting each test into the worker with the shortest expected execution time
    # If a test that no longer exists is referenced in the file_to_times_hash, do not include it in
    # the list of tests to be executed. If there are new tests not included in the file_to_times_hash,
    # assume they will run fast.
    def time_greedy_partitions_for(file_to_times_hash, all_files, workers)
      # exclude tests that are not present
      file_to_times_hash.slice!(*all_files)

      min_test_time = file_to_times_hash.values.flatten.min || 1
      setup_time = min_test_time / 2
      files_by_worker = []
      runtimes_by_worker = []
      # Longest tests first; each goes to the currently-fastest worker.
      file_to_times_hash.to_a.sort_by { |a| a.last.max }.reverse_each do |file, times|
        file_runtime = times.max
        if runtimes_by_worker.length < workers
          files_by_worker << [file]
          runtimes_by_worker << file_runtime
        else
          _fastest_worker_time, fastest_worker_index = runtimes_by_worker.each_with_index.min
          files_by_worker[fastest_worker_index] << file
          runtimes_by_worker[fastest_worker_index] += file_runtime - setup_time
        end
      end

      # Add any missing files
      missing_files = all_files - file_to_times_hash.keys
      files_by_worker = files_by_worker.zip(missing_files.in_groups(workers)).map(&:flatten).map(&:compact)

      files_by_worker
    end

    def load_manifest(file_name)
      YAML.load_file(file_name) if file_name
    end

    # File-to-worker distribution strategies, selectable via 'balance'.
    module Strategies
      class << self
        def alphabetically(files, workers)
          files.in_groups(workers)
        end

        # One file per part, regardless of worker count.
        def isolated(files, workers)
          files.in_groups_of(1)
        end

        def round_robin(files, workers)
          files.in_groups_of(workers).transpose
        end

        def shuffle(files, workers)
          files.shuffle.in_groups(workers)
        end

        def size(files, workers)
          files.sort_by { |path| File.size(path) }.reverse.in_groups_of(workers).transpose
        end

        # Largest-first greedy bin packing by file size.
        def size_greedy_partitioning(files, workers)
          files = files.sort_by { |path| 0 - File.size(path) }
          numbers = (0...workers).to_a
          results = numbers.map { [] }
          sizes = numbers.map { 0 }
          files.each do |file|
            dest = numbers.sort_by { |n| sizes[n] }.first
            sizes[dest] += File.size(file)
            results[dest] << file
          end
          return results
        end

        # Sequentially fills buckets up to the average total size per worker.
        def size_average_partitioning(files, workers)
          threshold = files.sum { |file| File.size(file) } / workers
          results = []
          this_bucket = []
          this_bucket_size = 0
          files.each do |file|
            if this_bucket_size > threshold && results.size < workers
              results << this_bucket
              this_bucket = []
              this_bucket_size =
this_bucket_size - threshold
            end
            this_bucket << file
            this_bucket_size += File.size(file)
          end
          results << this_bucket
          return results
        end
      end
    end
  end
end


================================================
FILE: lib/partitioner/dependency_map.rb
================================================
require 'partitioner/default'
require 'git_blame'

module Partitioner
  # A variation on Partitioner::Default, which allows builds to run a subset of tests based on the files changed
  # on its branch.
  #
  # Accepts all the same configuration options as Partitioner::Default, and optionally accepts a dependency_map for
  # each specified test target.
  #
  # Sample excerpt from `kochiku.yml` with options:
  #
  # ```yml
  # partitioner: dependency_map
  #
  # dependency_map_options:
  #   # Branches with these names will include files that match every test_glob, regardless of files changed on branch
  #   run_all_tests_for_branches:
  #     - master
  #
  # targets:
  #   - dependency_map:
  #       # For each object in the dependency_map array, if its source_glob matches files changed on this branch,
  #       # add files that match its test_glob to the files that should be used in the partitions
  #       - source_glob: foo/**
  #         test_glob: foo/**/*spec.rb
  #         workers: 1 # Add this many workers if this source_glob matches files changed on this branch
  #
  #       - source_glob:
  #           - bar/**
  #           - app/bar/**
  #         test_glob:
  #           - bar/**/*spec.rb
  #           - spec/bar/**/*spec.rb
  #         workers: 5
  #
  #       - source_glob: *
  #         test_glob: baz/*spec.rb
  #
  #     # If a target specifies a default_test_glob and none of its specified source_globs match files changed,
  #     # add files that match its default_test_glob to the files that should be used in the partitions
  #     default_test_glob:
  #       - {foo,bar}/**/*spec.rb
  #       - spec/bar/**/*spec.rb
  #       - baz/*spec.rb
  #
  #     # Maximum number of workers for this partition
  #     workers: 30
  # ```
  class DependencyMap < Default
    private

    KOCHIKU_YML_LOCS = %w(kochiku.yml config/kochiku.yml config/ci/kochiku.yml).freeze

    # Indicates whether this build should run all test files, or only those test files which map to source files
    # that have changed in this branch
    def should_run_all_tests
      @should_run_all_tests ||= (
        # Run all tests if kochiku.yml is formatted the old way (as an array)
        # NOTE(review): this `return` exits the whole method (skipping the
        # memoization assignment); intended, but easy to misread.
        return true if @kochiku_yml.is_a?(Array)

        # Run all tests if this branch name is included in dependency_map_options.run_all_tests_for_branches
        branches_that_run_all_tests = @kochiku_yml
                                      .fetch('dependency_map_options', {})
                                      .fetch('run_all_tests_for_branches', [])
        [*branches_that_run_all_tests].include?(@build.branch_record.name)
      )
    end

    # Overrides Partitioner::Default#get_file_parts_for. Decides which test files to include in the partitions
    # based on dependency_map option in each test target.
    def get_file_parts_for(subset)
      glob = subset.fetch('glob', '/dev/null')
      manifest = subset['manifest']
      workers = subset.fetch('workers', 1)
      strategy = subset.fetch('balance', 'round_robin')
      strategy = 'round_robin' unless Strategies.respond_to?(strategy) # override if specified strategy is invalid

      dependency_map = subset['dependency_map']
      default_test_glob = subset['default_test_glob']

      if dependency_map.present?
        if should_run_all_tests
          test_globs_to_add = dependency_map.map { |dependency| dependency.fetch('test_glob', '') } << default_test_glob
        else
          test_globs_to_add = []
          workers_for_dependency_map = 0
          changed_files = GitBlame.net_files_changed_in_branch(@build).map { |file_object| file_object[:file] }

          # If a source_glob matches the changed files on this branch, add its test_glob to the partition
          dependency_map.each do |dependency|
            source_globs = [*dependency.fetch('source_glob', '')]

            matched_files = changed_files.select { |path| source_globs.any? { |pattern| File.fnmatch(pattern, path) } }
            unless matched_files.empty?
              test_globs_to_add << dependency.fetch('test_glob', '')
              workers_for_dependency_map += dependency.fetch('workers', 0)
            end
          end

          # If no source_globs matched the changed files on this branch, add the default_test_glob to the partition
          # NOTE(review): when no default_test_glob is configured this appends
          # nil, and Dir[*[nil]] below raises TypeError -- confirm configs
          # always define default_test_glob alongside dependency_map.
          if test_globs_to_add.empty?
            test_globs_to_add << default_test_glob
          end

          # If workers were added for the source_globs that matched, and the total is less than the maximum number
          # of workers allotted for this target, use that amount of workers to build the partitions
          if workers_for_dependency_map > 0
            workers = [workers, workers_for_dependency_map].min
          end
        end

        test_globs_to_add.flatten!
        files = Dir[*test_globs_to_add]
      elsif default_test_glob.present?
        files = Dir[*default_test_glob]
      else
        files = Array(load_manifest(manifest)) | Dir[*glob]
      end

      return [] if files.empty?

      file_to_times_hash = load_manifest(subset['time_manifest'])

      balanced_partitions = if file_to_times_hash.is_a?(Hash)
        @max_time ||= max_build_time
        time_greedy_partitions_for(file_to_times_hash, files, workers)
      else
        []
      end

      # Timed files were placed greedily; distribute the remainder.
      files -= balanced_partitions.flatten
      Strategies.send(strategy, files, workers) + balanced_partitions
    end
  end
end


================================================
FILE: lib/partitioner/go.rb
================================================
# frozen_string_literal: true
require 'cocaine'
require 'fileutils'
require 'json'
require 'set'
require 'partitioner/base'

module Partitioner
  # This partitioner shards Go repos
  # Example usage
  ################################
  # partitioner: go
  # go_partitioner_settings:
  #   ignore_paths:
  #     - kochiku.yml
  #   all_packages:
  #     test:
  #       # All others will run on 1 worker
  #       items : 4
  #       inventory2: 2
  #     custom_go: 4
  #   top_level_packages:
  #     build: 4
  #     static_analysis: 1
  #   package_prefix: "square/up"
  ################################
  class Go < Base
    class DependencyError < StandardError; end

    def initialize(build, kochiku_yml)
      @build = build
      @options = {}
      @settings = {}
      if kochiku_yml
        @settings = kochiku_yml['go_partitioner_settings'] if kochiku_yml['go_partitioner_settings']
        @options['log_file_globs'] = Array(kochiku_yml['log_file_globs']) if kochiku_yml['log_file_globs']
        @options['retry_count'] = kochiku_yml['retry_count'] if kochiku_yml['retry_count']
      end
      # Go package prefix (e.g., "square/up").
@package_prefix = @settings['package_prefix'] ? File.join(@settings['package_prefix'], '') : ''
    end

    # Computes partitions for the packages affected by this build's changed
    # files, plus previously-failed packages on convergence branches. Any
    # change to a top-level file rebuilds everything.
    def partitions
      Rails.logger.info("Partition started: [#{all_packages_target_types} #{top_level_packages_target_types}] #{@build.ref}")
      start = Time.current
      packages_to_build = []
      files_changed_method = @build.branch_record.convergence? ? :files_changed_since_last_build : :files_changed_in_branch
      GitBlame.public_send(files_changed_method, @build, sync: false).each do |file_and_emails|
        file_path = file_and_emails[:file]
        next if @settings.fetch('ignore_paths', []).detect { |dir| file_path.start_with?(dir) }
        # build all for top level file changes
        dir_path = File.dirname(file_path)
        return add_partitions(all_packages) if dir_path == "."
        packages_to_build += file_to_packages(file_path)
      end
      packages_to_build += failed_convergence_tests
      add_partitions(packages_to_build.uniq)
    ensure
      Rails.logger.info("Partition finished: [#{all_packages_target_types} #{top_level_packages_target_types}] #{Time.current - start} #{@build.ref}")
    end

    # Returns the packages that must be rebuilt because file_path changed:
    # reverse dependencies for .go files, the whole top-level package otherwise.
    def file_to_packages(file_path)
      dir_path = File.dirname(file_path)
      if file_path.end_with? '.go'
        path_affected_by_file = @package_prefix + dir_path
        return Array(depends_on_map[path_affected_by_file])
      # if its not a go file run all tests in top-level package
      else
        top_level_package = dir_path.split("/").first
        return Array(top_level_package_map[@package_prefix + top_level_package])
      end
    end

    def failed_convergence_tests
      # add in the packages that failed previously if its the convergence branch
      if @build.branch_record.convergence? && @build.previous_build
        previous_failures = @build.previous_build.build_parts.select(&:unsuccessful?).map(&:paths).flatten.uniq
        previous_failures.map! { |path| @package_prefix + path }
      end
      previous_failures || []
    end

    # Run for each packages.
    def all_packages_target_types
      @all_packages_target_types ||= @settings['all_packages'] || {test: 1}
    end

    # Run only for the top-level package
    def top_level_packages_target_types
      @top_level_packages_target_types ||= @settings['top_level_packages'] || {build: 1}
    end

    # All non-vendored packages under the configured prefix.
    def all_packages
      @all_packages ||= package_dependency_map.keys.select do |m|
        m.start_with?(@package_prefix) && !m.start_with?(File.join(@package_prefix, 'vendor'))
      end
    end

    def top_level_package_map
      @top_level_package_map ||= filter_test(all_packages).group_by { |package| package.match(%r{^#{@package_prefix}+[^\/]*})[0] }
    end

    # Group folders by their top-level package name.
    def package_folders_map(packages)
      package_folders_map = filter_test(packages).group_by { |package| package.match(%r{^#{@package_prefix}+[^\/]*})[0] }
      package_folders_map.each { |k, v| package_folders_map[k] = v.map { |vv| package_to_folder(vv) } }
    end

    # "square/up/foo" -> "./foo/" relative folder path.
    def package_to_folder(package)
      File.join('.', package.gsub(/^#{@package_prefix}/, ""), '')
    end

    # Drops the synthetic "<pkg>_test" entries used for external test packages.
    def filter_test(packages)
      packages.reject { |pack| pack.match(/_test$/) }
    end

    # import path -> set of packages that directly import it (including
    # itself and the synthetic "_test" packages for XTestImports).
    def package_dependency_map
      return @package_dependency_map if @package_dependency_map
      @package_dependency_map = {}
      package_info_map.each do |import_path, package_info|
        # Add itself?
        @package_dependency_map[import_path] ||= Set.new
        @package_dependency_map[import_path].add(import_path)
        imports = []
        imports.concat(package_info["Imports"]) unless package_info["Imports"].nil?
        imports.concat(package_info["TestImports"]) unless package_info["TestImports"].nil?
        imports.each do |import|
          @package_dependency_map[import] ||= Set.new
          @package_dependency_map[import].add(import_path)
        end
        xtest_imports = package_info["XTestImports"]
        next if xtest_imports.nil?
        # Add itself?
        test_import_path = import_path + '_test'
        @package_dependency_map[test_import_path] ||= Set.new
        @package_dependency_map[test_import_path].add(test_import_path)
        xtest_imports.each do |import|
          @package_dependency_map[import] ||= Set.new
          @package_dependency_map[import].add(test_import_path)
        end
      end
      @package_dependency_map
    end

    # import path -> set of packages whose tests must run when it changes:
    # transitive reverse deps plus each of those packages' direct test deps.
    def depends_on_map
      return @depends_on_map if @depends_on_map
      # Create a map on transitive non-test dependency
      # and a map on direct test dependency.
      tmp_depends_on_map = {}
      test_dep_map = {}
      package_info_map.each do |import_path, package_info|
        # Add itself?
        tmp_depends_on_map[import_path] ||= Set.new
        tmp_depends_on_map[import_path].add(import_path)
        deps = package_info["Deps"]
        deps&.each do |dep|
          tmp_depends_on_map[dep] ||= Set.new
          tmp_depends_on_map[dep].add(import_path)
        end
        test_imports = package_info["TestImports"]
        test_imports&.each do |import|
          test_dep_map[import] ||= Set.new
          test_dep_map[import].add(import_path)
        end
        xtest_imports = package_info["XTestImports"]
        next if xtest_imports.nil?
        # Add itself?
        test_import_path = import_path + '_test'
        tmp_depends_on_map[test_import_path] ||= Set.new
        tmp_depends_on_map[test_import_path].add(test_import_path)
        xtest_imports.each do |import|
          test_dep_map[import] ||= Set.new
          test_dep_map[import].add(test_import_path)
        end
      end
      @depends_on_map = {}
      tmp_depends_on_map.each do |import_path, deps|
        @depends_on_map[import_path] = Set.new
        deps.each do |dep|
          @depends_on_map[import_path].add(dep)
          test_deps = test_dep_map[dep]
          next if test_deps.nil?
          test_deps.each do |test_dep|
            @depends_on_map[import_path].add(test_dep)
          end
        end
      end
      @depends_on_map
    end

    # import path -> parsed `go list -json` info, computed inside a temp
    # checkout of the build's ref.
    def package_info_map
      return @package_info_map if @package_info_map
      @package_info_map = {}
      GitRepo.inside_copy(@build.repository, @build.ref) do |dir|
        # Relocate all the code in src/#{@package_prefix}
        # Apparently, go list generates bad package names if we don't do this.
        src_dir = FileUtils.mkdir_p(File.join(dir, "src", @package_prefix))[0]
        Cocaine::CommandLine.new("mv $(git ls-tree --name-only HEAD) #{src_dir}").run
        # Run "go list". Note that the output is NOT a valid single
        # JSON value, but multiple JSON values. See https://github.com/golang/go/issues/12643.
        begin
          outputs = Cocaine::CommandLine.new("GOPATH=#{dir} go list -json ./...").run
        rescue Cocaine::ExitStatusError => e
          raise DependencyError, "error running 'go list -json ./...' \n\n #{e.message}"
        end
        # Split the concatenated JSON objects apart and re-wrap each in braces.
        l = outputs[1..-3].split("}\n{")
        l.each do |blob|
          package_info = JSON.parse("{" + blob + "}")
          import_path = package_info["ImportPath"]
          @package_info_map[import_path] = package_info
        end
      end
      @package_info_map
    end

    # Builds the partition list for `packages`, applying both the per-package
    # ("all_packages") and per-top-level-package target/worker settings.
    def add_partitions(packages)
      @partition_list = []
      package_map = package_folders_map(packages)
      all_packages_target_types.each do |target_type, workers|
        if workers.is_a?(Hash)
          package_map.each do |package, folders|
            worker_number = workers[package.gsub(/^#{@package_prefix}/, "")]
            add_with_split(folders, target_type, worker_number)
          end
        elsif workers.is_a?(Integer)
          add_with_split(package_map.map { |_, v| v }.flatten.uniq, target_type, workers)
        end
      end
      top_level_packages_target_types.each do |target_type, workers|
        if workers.is_a?(Hash)
          package_map.each do |package, folders|
            worker_number = workers[package.gsub(/^#{@package_prefix}/, "")]
            add_with_split(folders, target_type, worker_number)
          end
        elsif workers.is_a?(Integer)
          add_with_split(package_map.map { |k, _| package_to_folder(k) }.uniq, target_type, workers)
        end
      end
      @partition_list
    end

    # Splits package_list into `workers` roughly equal chunks, one partition
    # each; nil workers means a single partition with everything.
    def add_with_split(package_list, target_type, workers)
      return if package_list.size.zero?
      if workers
        split_size = (package_list.size / workers.to_f).ceil
        Array(package_list).each_slice(split_size).to_a.each do |chunk|
          @partition_list << partition_info(chunk, target_type)
        end
      else
        @partition_list << partition_info(package_list, target_type)
      end
    end

    def partition_info(packages, type)
      queue = @build.branch_record.convergence? ?
'ci' : 'developer'
      queue_override = @settings.fetch('queue_overrides', []).detect do |override|
        override['queue'] if override['paths']&.detect { |path| packages.include? path }
      end
      queue = "#{queue}-#{queue_override['queue']}" if queue_override.present?
      {
        'type' => type,
        'files' => packages&.sort!,
        'queue' => queue,
        'retry_count' => @options.fetch('retry_count', 0),
        'options' => @options
      }
    end
  end
end


================================================
FILE: lib/partitioner/maven.rb
================================================
# frozen_string_literal: true
require 'nokogiri'
require 'set'
require 'partitioner/base'
require 'partitioner/topological_sorter'

module Partitioner
  # This partitioner uses knowledge of Maven to shard large java repos
  class Maven < Base
    POM_XML = 'pom.xml'

    def initialize(build, kochiku_yml)
      @build = build
      @options = {}
      if kochiku_yml
        @settings = kochiku_yml['maven_settings'] if kochiku_yml['maven_settings']
        @options['log_file_globs'] = Array(kochiku_yml['log_file_globs']) if kochiku_yml['log_file_globs']
        @options['retry_count'] = kochiku_yml['retry_count'] if kochiku_yml['retry_count']
      end
      @settings ||= {}
    end

    # Determines the maven modules affected by this build's changed files
    # (via reverse dependencies) and groups them into partitions. Changes
    # outside any module, or under 'build_everything' paths, rebuild all.
    def partitions
      Rails.logger.info("Partition started: [maven] #{@build.ref}")
      start = Time.current
      modules_to_build = Set.new

      GitRepo.inside_copy(@build.repository, @build.ref) do
        @settings.fetch('always_build', []).each do |maven_module|
          modules_to_build.add(maven_module)
        end

        files_changed_method = @build.branch_record.convergence? ? :files_changed_since_last_build : :files_changed_in_branch
        GitBlame.public_send(files_changed_method, @build, sync: false).each do |file_and_emails|
          next if @settings.fetch('ignore_paths', []).detect { |dir| file_and_emails[:file].start_with?(dir) }

          module_affected_by_file = file_to_module(file_and_emails[:file])
          if module_affected_by_file.nil? || @settings.fetch('build_everything', []).detect { |dir| file_and_emails[:file].start_with?(dir) }
            return add_options(all_partitions)
          else
            modules_to_build.merge(depends_on_map[module_affected_by_file] || Set.new)
          end
        end

        # Re-run anything that failed in the previous convergence build.
        if @build.branch_record.convergence? && @build.previous_build
          modules_to_build.merge(@build.previous_build.build_parts.select(&:unsuccessful?).map(&:paths).flatten.uniq)
        end

        add_options(group_modules(sort_modules(modules_to_build)))
      end
    ensure
      # TODO: log this information to event stream
      Rails.logger.info("Partition finished: [maven] #{Time.current - start} #{@build.ref}")
    end

    # Maps each commit author's email to the changed files that plausibly
    # caused the current convergence failures. Empty for feature branches.
    def emails_for_commits_causing_failures
      return {} unless @build.branch_record.convergence?

      failed_modules = @build.build_parts.failed_or_errored.each_with_object(Set.new) do |build_part, failed_set|
        build_part.paths.each { |path| failed_set.add(path) }
      end

      email_and_files = Hash.new { |hash, key| hash[key] = [] }

      GitRepo.inside_copy(@build.repository, @build.ref) do
        GitBlame.files_changed_since_last_green(@build, fetch_emails: true).each do |file_and_emails|
          file = file_and_emails[:file]
          emails = file_and_emails[:emails]
          module_affected_by_file = file_to_module(file_and_emails[:file])

          # A file is implicated when it is outside any module / in a
          # build_everything path, or its module's reverse deps overlap the
          # failed modules.
          if module_affected_by_file.nil? || @settings.fetch('build_everything', []).detect { |dir| file_and_emails[:file].start_with?(dir) }
            emails.each { |email| email_and_files[email] << file }
          elsif (set = depends_on_map[module_affected_by_file]) && !set.intersection(failed_modules).empty?
            emails.each { |email| email_and_files[email] << file }
          end
        end
      end

      email_and_files.each_key { |email| email_and_files[email].sort!.uniq! }

      email_and_files
    end

    # Everything below this line should be private

    # Module names declared in the top-level pom.xml (memoized).
    def maven_modules
      return @maven_modules if @maven_modules
      top_level_pom = Nokogiri::XML(File.read(POM_XML))
      @maven_modules = top_level_pom.css('project>modules>module').map { |mvn_module| mvn_module.text }
    end

    def all_partitions
      group_modules(sort_modules(maven_modules))
    end

    def pom_for(mvn_module)
      Nokogiri::XML(File.read("#{mvn_module}/pom.xml"))
    end

    def add_options(group_modules)
      # create multiple entries for builds specifying multiple workers, assigning
      # distinct test chunks to each
      group_modules.flat_map do |group|
        multiple_workers_list = @settings.fetch('multiple_workers', {})
        multiple_workers_module = multiple_workers_list.keys.detect do |path|
          group['files'].include? path
        end
        need_multiple_workers = multiple_workers_module.present?
        if need_multiple_workers
          total_workers = multiple_workers_list[multiple_workers_module]
          (1..total_workers).map { |worker_chunk|
            new_group = group.clone
            new_options = @options.clone
            new_options['total_workers'] = total_workers
            new_options['worker_chunk'] = worker_chunk
            new_group['options'] = new_options
            new_group
          }
        else
          group['options'] = @options
          group
        end
      end
    end

    # Groups modules by top-level directory (or two levels deep for
    # directories listed in 'expand_directories') into partitions.
    def group_modules(mvn_modules)
      expanding_dirs = @settings.fetch('expand_directories', [])
      mvn_modules.group_by do |m|
        split_dirs = m.split("/")
        if expanding_dirs.include? split_dirs.first
          "#{split_dirs[0]}/#{split_dirs[1]}"
        else
          split_dirs.first
        end
      end.values.map { |modules| partition_info(modules) }
    end

    # Returns mvn_modules in dependency order (dependencies first).
    def sort_modules(mvn_modules)
      sorted_modules = Partitioner::TopologicalSorter.new(module_dependency_map).tsort
      sorted_modules.delete_if { |mvn_module| !mvn_modules.include?(mvn_module) }
    end

    def partition_info(mvn_modules)
      queue = @build.branch_record.convergence? ? 'ci' : 'developer'
      queue_override = @settings.fetch('queue_overrides', []).detect do |override|
        override['queue'] if override['paths'].detect { |path| mvn_modules.include?
path } end queue = "#{queue}-#{queue_override['queue']}" if queue_override.present? { 'type' => 'maven', 'files' => mvn_modules.sort!, 'queue' => queue, 'retry_count' => @options.fetch('retry_count', 0) } end def depends_on_map return @depends_on_map if @depends_on_map module_depends_on_map = {} transitive_dependency_map.each do |mvn_module, dep_set| module_depends_on_map[mvn_module] ||= Set.new module_depends_on_map[mvn_module].add(mvn_module) dep_set.each do |dep| module_depends_on_map[dep] ||= Set.new module_depends_on_map[dep].add(dep) module_depends_on_map[dep].add(mvn_module) end end @depends_on_map = module_depends_on_map end def module_dependency_map return @module_dependency_map if @module_dependency_map group_artifact_map = {} maven_modules.each do |mvn_module| module_pom = pom_for(mvn_module) group_id = module_pom.css('project>groupId').first artifact_id = module_pom.css('project>artifactId').first next unless group_id && artifact_id group_id = group_id.text artifact_id = artifact_id.text group_artifact_map["#{group_id}:#{artifact_id}"] = mvn_module.to_s end @module_dependency_map = {} maven_modules.each do |mvn_module| module_pom = pom_for(mvn_module) @module_dependency_map[mvn_module] ||= Set.new module_pom.css('project>dependencies>dependency').each do |dep| group_id = dep.css('groupId').first artifact_id = dep.css('artifactId').first raise "dependency in #{mvn_module}/pom.xml is missing an artifactId or groupId" unless group_id && artifact_id if (mod = group_artifact_map["#{group_id.text}:#{artifact_id.text}"]) module_dependency_map[mvn_module].add(mod) end end end @module_dependency_map end def transitive_dependency_map @transitive_dependency_map ||= begin module_dependency_map.each_with_object({}) do |(mvn_module, _), dep_map| dep_map[mvn_module] = transitive_dependencies(mvn_module, module_dependency_map) end end end def transitive_dependencies(mvn_module, dependency_map) result_set = Set.new to_process = [mvn_module] while (dep_module = 
to_process.shift) deps = dependency_map[dep_module].to_a to_process += (deps - result_set.to_a) result_set << dep_module end result_set end def file_to_module(file_path) dir_path = file_path while (dir_path = File.dirname(dir_path)) != "." return dir_path if File.exist?("#{dir_path}/pom.xml") end nil end end end ================================================ FILE: lib/partitioner/topological_sorter.rb ================================================ require 'tsort' module Partitioner class TopologicalSorter include TSort def initialize(dependency_map) @dependency_map = dependency_map end def tsort_each_node(&block) @dependency_map.each_key(&block) end def tsort_each_child(project, &block) @dependency_map.fetch(project).each(&block) end end end ================================================ FILE: lib/partitioner.rb ================================================ require 'partitioner/base' require 'partitioner/maven' require 'partitioner/default' require 'partitioner/dependency_map' module Partitioner def self.for_build(build) kochiku_yml = build.kochiku_yml if kochiku_yml start = Time.current res = case kochiku_yml['partitioner'] when 'maven' Partitioner::Maven.new(build, kochiku_yml) when 'dependency_map' Partitioner::DependencyMap.new(build, kochiku_yml) when 'go' Partitioner::Go.new(build, kochiku_yml) else # Default behavior Partitioner::Default.new(build, kochiku_yml) end finish = Time.current diff = finish - start Rails.logger.info("Partition finished: [#{kochiku_yml['partitioner'] || 'DEFAULT'}] #{diff} #{build.ref}") res else # This should probably raise Partitioner::Base.new(build, kochiku_yml) end end end ================================================ FILE: lib/remote_server/github.rb ================================================ require 'github_commit_status' require 'github_post_receive_hook' require 'github_request' require 'git_merge_executor' module RemoteServer # All integration with Github must go via this class. 
# Recognized Github remote URL shapes. Each pattern must define the named
# captures :host, :username and :name — `attributes` reads match[:host],
# match[:username] and match[:name]. The extracted copy of this file had
# lost the group names ("(?[^:]*)"), which makes every pattern a syntax
# error / wrong group; restored here from that usage.
URL_PARSERS = [
  %r{\Agit@(?<host>[^:]*):(?<username>[^\/]*)/(?<name>[-.\w]+?)(\.git)?\z},       # git@
  %r{\Agit://(?<host>[^\/]*)/(?<username>[^\/]*)/(?<name>[-.\w]+)\.git\z},        # git:// (GHE only)
  %r{\Ahttps?://(?<host>[^\/]*)/(?<username>[^\/]*)/(?<name>[-.\w]+?)(\.git)?\z}, # https://
].freeze
# Recognized Stash remote URL shapes. Each pattern must define the named
# captures :host, :username and :name; the ssh form additionally captures
# :port including its leading colon (the caller strips it with
# match[:port].delete(':')). The extracted copy had lost the group names;
# restored here from that usage.
URL_PARSERS = [
  %r{\Agit@(?<host>[^:]*):(?<username>[^\/]*)/(?<name>[-.\w]+)\.git\z},
  %r{\Assh://git@(?<host>[^\/]*?)(?<port>:\d+)?/(?<username>[^\/]*)/(?<name>[-.\w]+)\.git\z},
  %r{\Ahttps://(?<host>[^@\/]+)/scm/(?<username>[^\/]*)/(?<name>[-.\w]+)\.git\z},
  %r{\Ahttps://(?<host>[^@\/]+)/projects/(?<username>[^\/]*)/repos/(?<name>[-.\w]+)/browse\z}
].freeze
# Stash cannot compare two arbitrary commits — only branch heads — so both
# variants anchor the comparison at refs/heads/master. When no second
# branch is given, only the target side is specified.
def url_for_compare(first_commit_branch, second_commit_branch)
  return "#{base_html_url}/compare/commits?targetBranch=refs%2Fheads%2Fmaster" if second_commit_branch.blank?

  "#{base_html_url}/compare/commits?targetBranch=#{second_commit_branch}&sourceBranch=refs%2Fheads%2Fmaster"
end
# Resolves +branch+ to a commit SHA using the Stash commits API.
# A value that is already a 40-char lowercase hex SHA is returned as-is.
# Raises RefDoesNotExist when the remote reports the ref as missing.
def sha_for_branch(branch)
  return branch if branch =~ /\A[0-9a-f]{40}\Z/
  response_body = @stash_request.get(base_api_url + "/commits?until=#{CGI.escape(branch)}&limit=1")
  response = JSON.parse(response_body)
  # The API returns a page of commits; with limit=1 the first value is the
  # branch head. Fall back to an empty hash so a missing list yields nil.
  branch_data = response["values"][0] || {}
  branch_data['id']
rescue => e
  # StashRequest#make_request raises RuntimeError messages that begin with
  # the Net::HTTP response class name; sniff that prefix to distinguish
  # "ref does not exist" from genuine failures, which are re-raised.
  case e.message.split(" ")[0]
  when "Net::HTTPNotFound", "Net::HTTPBadRequest"
    raise RefDoesNotExist, "Could not locate ref #{branch} on remote git server"
  else
    raise e
  end
end
# Deletes +branch+ via Stash's branch-utils REST resource.
# Raises StashAPIError when the response body reports errors;
# returns true otherwise. Pass dryRun = true to only simulate the delete.
def delete_branch(branch, dryRun = false)
  endpoint = "https://#{attributes[:host]}/rest/branch-utils/1.0/projects/#{attributes[:repository_namespace]}/repos/#{attributes[:repository_name]}/branches"
  body = @stash_request.delete(endpoint, "name" => branch, "dryRun" => dryRun)
  if body
    parsed = JSON.parse(body)
    raise StashAPIError, parsed["errors"].to_s if parsed["errors"]
  end
  true
end
# Maps a Kochiku build's state onto the three states accepted by
# Stash's build-status API.
def stash_status_for(build)
  return 'SUCCESSFUL' if build.succeeded?
  return 'FAILED' if build.failed? || build.aborted?

  'INPROGRESS'
end
# Instantiates the RemoteServer implementation for the server configured
# to handle +url+.
#
# Raises UnknownGitServer when the URL matches no configured server, or
# when the configured server type has no implementation.
def self.for_url(url)
  server = Settings.git_server(url)
  raise UnknownGitServer, url unless server

  case server.type
  when 'stash'
    RemoteServer::Stash.new(url, server)
  when 'github'
    RemoteServer::Github.new(url, server)
  else
    # BUG FIX: previously interpolated the bare local `type`, which is
    # undefined in this scope and raised NameError instead of the intended
    # UnknownGitServer. Use server.type.
    raise UnknownGitServer, "No implementation for server type #{server.type}"
  end
end
# Immutable-ish value object holding the per-host configuration for a
# single git server (Github or Stash) as parsed from kochiku.yml.
class ServerSettings
  attr_reader :type, :oauth_token, :stash_username, :stash_password, :mirror, :host, :aliases

  # raw_settings: hash of settings for this server, or nil (all readers
  #               then return nil).
  # host:         the server's hostname.
  def initialize(raw_settings, host)
    @host = host
    return if raw_settings.nil?

    @type = raw_settings[:type]
    @mirror = raw_settings[:mirror]
    @aliases = raw_settings[:aliases]

    # Github-specific: the OAuth token lives in a file on disk, not in the YAML.
    token_file = raw_settings[:oauth_token_file]
    @oauth_token = File.read(token_file).chomp if token_file

    # Stash-specific credentials; the password also lives in a file.
    @stash_username = raw_settings[:username]
    password_file = raw_settings[:password_file]
    @stash_password = File.read(password_file).chomp if password_file
  end
end
# Returns the configured ServerSettings whose host — or any of its
# aliases — appears as a substring of +url+; nil when none match.
def git_server(url)
  git_servers.each_value do |server|
    return server if url.include?(server.host)

    candidates = server.aliases
    return server if candidates && candidates.any? { |a| url.include?(a) }
  end
  nil
end
# Best-effort deletion of the build's branch through the Stash REST api.
# API failures are logged at warn level and swallowed so that callers
# (post-merge cleanup) are never interrupted by a failed delete.
def delete_branch
  server = @build.repository.remote_server
  begin
    Rails.logger.info("Trying to delete branch using Stash REST api")
    server.delete_branch(@build.branch_record.name)
  rescue RemoteServer::StashAPIError => e
    Rails.logger.warn("Deletion of branch #{@build.branch_record.name} failed")
    Rails.logger.warn(e.message)
  end
end

The page you were looking for doesn't exist.

You may have mistyped the address or the page may have moved.

================================================ FILE: public/422.html ================================================ The change you wanted was rejected (422)

The change you wanted was rejected.

Maybe you tried to change something you didn't have access to.

================================================ FILE: public/500.html ================================================ We're sorry, but something went wrong (500)

We're sorry, but something went wrong.

We've been notified about this issue and we'll take a look at it shortly.

================================================ FILE: public/robots.txt ================================================ # See http://www.robotstxt.org/wc/norobots.html for documentation on how to use the robots.txt file # # To ban all spiders from the entire site uncomment the next two lines: # User-Agent: * # Disallow: / ================================================ FILE: script/ci ================================================ #!/usr/bin/env bash echo Command: $0 $* echo echo "Environment:" env echo # set -ex: make script verbose and exit on first failed command set -ex gem install bundler -v '>= 1.5.2' --conservative bundle check || bundle bundle exec rake db:create db:schema:load RAILS_ENV=test bundle exec rspec bundle exec rubocop bundle exec haml-lint app/views/ ================================================ FILE: script/kochiku-build.sh.sample ================================================ #!/usr/bin/env ruby # This script can be used to initiate a kochiku build of the current branch. # The script does not upload any code so the branch must be pushed to the remote before the script is executed. # # In order to use: # - copy the script to your local machine # - set KOCHIKU_HOST to the host where Kochiku is running KOCHIKU_HOST = 'https://kochiku.example.com' require 'net/https' require 'uri' require 'shellwords' # Merge on success requested? if ARGV.delete("--merge") merge_on_success = "1" end # If a ref is given as an argument, use that. Otherwise use the current commit if ARGV[0] commit_ish = Shellwords.escape(ARGV[0]) branch = "" else commit_ish = "HEAD" branch = `git rev-parse --abbrev-ref HEAD`.strip end ref = `git rev-parse #{commit_ish}`.strip # verify that the ref exists on origin git_branch_contains = `git branch -r --contains #{ref}` # git branch --contains will return a non-zero exit code if it does not recognize # the sha. It will return a 0 exit code and no output if the ref only exists locally if ($? 
== 0 && git_branch_contains.empty?) || $? != 0 puts "Failed: please push #{ref}" exit(1) end repo_url = `git config --get remote.origin.url`.strip uri = URI.parse("#{KOCHIKU_HOST}/builds") params = { 'git_sha' => ref, 'git_branch' => branch, 'merge_on_success' => merge_on_success, 'repo_url' => repo_url, } http = Net::HTTP.new(uri.host, uri.port) http.use_ssl = true request = Net::HTTP::Post.new(uri.request_uri) request.set_form_data(params) response = http.request(request) if response.code.to_i >= 400 puts "#{response.code} #{response.message}:" puts response.body exit(1) else puts response['location'] `open -g #{response['location']}` end ================================================ FILE: spec/controllers/branches_controller_spec.rb ================================================ require 'spec_helper' require 'rexml/document' describe BranchesController do render_views describe "#index" do let(:repo) { FactoryBot.create(:repository) } let!(:a) { FactoryBot.create(:branch, name: 'aster', repository: repo, convergence: true) } let!(:b) { FactoryBot.create(:branch, name: 'buckeye', repository: repo, updated_at: 30.minutes.ago) } let!(:c) { FactoryBot.create(:branch, name: 'creosote', repository: repo, updated_at: 15.minutes.ago) } it "shows branches in order" do get :index, params: { repository_path: repo } expect(assigns(:convergence_branches).map(&:name)).to eq(%w{aster}) expect(assigns(:recently_active_branches).map(&:name)).to eq(%w{creosote buckeye}) end end describe "#show" do let(:branch) { FactoryBot.create(:branch) } let!(:build1) { FactoryBot.create(:build, :branch_record => branch, :state => 'succeeded', :test_command => "script/ci") } let!(:build2) { FactoryBot.create(:build, :branch_record => branch, :state => 'errored', :test_command => "script/ci") } it "should return an rss feed of builds" do get :show, params: { repository_path: branch.repository, id: branch, format: :rss } doc = REXML::Document.new(response.body) items = 
doc.elements.to_a("//channel/item") expect(items.length).to eq(Build.count) expect(items.first.elements.to_a("title").first.text).to eq("Build Number #{build2.id} failed") expect(items.last.elements.to_a("title").first.text).to eq("Build Number #{build1.id} success") end it "should return a JSON if requested" do get :show, params: { repository_path: branch.repository, id: branch, format: :json } results = JSON.parse(response.body) expect(results['id']).to eq(branch.id) expect(results['recent_builds'].length).to eq(Build.count) expect(results['recent_builds'][0]['build']['id']).to eq(build1.id) expect(results['recent_builds'][1]['build']['id']).to eq(build2.id) end context "when the repository is disabled" do let(:branch2) { FactoryBot.create(:branch_on_disabled_repo) } before do build3 = FactoryBot.create(:build, branch_record: branch2, state: 'failed') build_part = FactoryBot.create(:build_part, build_instance: build3) FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed') end it "should disable build button" do get :show, params: { repository_path: branch2.repository, id: branch2 } expect(response.body).to match(/disabled="disabled"/) end end context "when the repository is enabled" do let(:branch2) { FactoryBot.create(:branch) } before do build3 = FactoryBot.create(:build, branch_record: branch2, state: 'failed') build_part = FactoryBot.create(:build_part, build_instance: build3) FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed') end it "shouldn't disable build button" do get :show, params: { repository_path: branch2.repository, id: branch2 } expect(response.body).to_not match(/disabled="disabled"/) end end end describe "#request_new_build" do let(:branch) { FactoryBot.create(:branch) } subject { post :request_new_build, params: { repository_path: branch.repository.to_param, id: branch.to_param } } context "when there is a new commit on the branch that hasn't been built" do before do @sha = 
to_40('1') fake_remote_server = double(:sha_for_branch => @sha) allow(RemoteServer).to receive(:for_url).with(branch.repository.url).and_return(fake_remote_server) end it "should create the new build and redirect there" do expect(branch.builds.where(ref: @sha).first).to be_nil subject new_build = branch.builds.where(ref: @sha).first expect(new_build).to be_present expect(response).to redirect_to(repository_build_path(branch.repository, new_build)) end end context "when kochiku has already built the most recent commit on the branch" do let(:branch_head_sha) { "4b41fe773057b2f1e2063eb94814d32699a34541" } before do FactoryBot.create(:build, state: 'errored', branch_record: branch, ref: branch_head_sha) fake_remote_server = double(:sha_for_branch => branch_head_sha) allow(RemoteServer).to receive(:for_url).with(branch.repository.url).and_return(fake_remote_server) end it "should not create a new build" do expect { subject }.to_not change { Build.count } expect(flash[:error]).to be_nil expect(flash[:warn]).to be_present end it "should redirect to the existing build" do subject expect(response).to redirect_to(repository_branch_path(branch.repository, branch)) end end end describe "#health" do let(:branch) { FactoryBot.create(:branch) } context "normal circumstances" do before do build = FactoryBot.create(:build, branch_record: branch, state: 'succeeded') build_part = FactoryBot.create(:build_part, build_instance: build) FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed') FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'passed') end it "should render" do get :health, params: { repository_path: branch.repository, id: branch } expect(response).to be_success end end context "no builds are present" do it "should not error" do get :health, params: { repository_path: branch.repository, id: branch } expect(response).to be_success end end context "only older builds are present" do before do # a build from 1 year ago 
build = FactoryBot.create(:build, branch_record: branch, state: 'failed', created_at: 1.year.ago) build_part = FactoryBot.create(:build_part, build_instance: build, created_at: 1.year.ago) FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed', created_at: 1.year.ago) end context "no builds from the last 30 days" do it "should not error" do get :health, params: { repository_path: branch.repository, id: branch } expect(response).to be_success end end context "no builds from the last 7 days" do before do # a build from 10 days ago build = FactoryBot.create(:build, branch_record: branch, state: 'failed', created_at: 10.days.ago) build_part = FactoryBot.create(:build_part, build_instance: build, created_at: 10.days.ago) FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed', created_at: 10.days.ago) end it "should not error" do get :health, params: { repository_path: branch.repository, id: branch } expect(response).to be_success end end end end describe "#build_time_history" do # the logic here is tested inside branch_spec and branch_decorator_spec. 
Just # verify that the endpoint responds ok let(:branch) { FactoryBot.create(:branch) } before do FactoryBot.create(:completed_build, branch_record: branch) end it "should render" do get :build_time_history, params: { repository_path: branch.repository, id: branch, format: :json } expect(response).to be_success end end describe "#status_report" do let(:repository) { FactoryBot.create(:repository) } let(:branch) { FactoryBot.create(:master_branch, repository: repository) } context "when a branch has no builds" do before { expect(branch.builds).to be_empty } it "should return 'Unknown' for activity" do get :status_report, params: { format: :xml } expect(response).to be_success doc = Nokogiri::XML(response.body) element = doc.at_xpath("/Projects/Project[@name='#{repository.to_param}']") expect(element['activity']).to eq('Unknown') end end context "with a in-progress build" do let!(:build) { FactoryBot.create(:build, state: 'running', branch_record: branch) } it "should return 'Building' for activity" do get :status_report, params: { format: :xml } expect(response).to be_success doc = Nokogiri::XML(response.body) element = doc.at_xpath("/Projects/Project[@name='#{repository.to_param}']") expect(element['activity']).to eq('Building') end end context "with a completed build" do let!(:build) { FactoryBot.create(:build, state: 'failed', branch_record: branch) } it "should return 'CheckingModifications' for activity" do get :status_report, params: { format: :xml } expect(response).to be_success doc = Nokogiri::XML(response.body) element = doc.at_xpath("/Projects/Project[@name='#{repository.to_param}']") expect(element['activity']).to eq('CheckingModifications') end end context "with extra convergence branch and one non-convergence" do before do FactoryBot.create(:branch, :name => 'feature-branch', convergence: false, repository: repository) FactoryBot.create(:branch, :name => 'convergence', convergence: true, repository: repository) end it "should include all of the 
convergence branches" do branch ## Explicitly reference the branch to cause it to load get :status_report, params: { format: :xml } expect(response).to be_success doc = Nokogiri::XML(response.body) elements = doc.xpath("/Projects/Project") expect(elements).to have(2).items names = elements.map{ |e| e.attribute("name").to_s } expect(names).to match_array([repository.to_param, "#{repository.to_param}/convergence"]) end end end end ================================================ FILE: spec/controllers/build_artifacts_controller_spec.rb ================================================ require 'spec_helper' describe BuildArtifactsController do describe "#create" do let(:build) { FactoryBot.create(:build) } let(:build_part) { build.build_parts.create!(:paths => ["a"], :kind => "test", :queue => 'ci') } let(:build_attempt) { build_part.build_attempts.create!(:state => 'failed') } let(:log_file) { fixture_file_upload("/build_artifact.log", 'text/xml') } it "should create a build artifact for the build attempt" do log_contents = log_file.read expect(log_contents).not_to be_empty log_file.rewind expect { post :create, params: { :build_attempt_id => build_attempt.to_param, :build_artifact => {:log_file => log_file}, :format => :xml } }.to change{ build_attempt.build_artifacts.count }.by(1) artifact = assigns(:build_artifact) expect(artifact.log_file.read).to eq(log_contents) end it "should return the correct location" do post :create, params: { :build_attempt_id => build_attempt.to_param, :build_artifact => {:log_file => log_file}, :format => :xml } expect(response).to be_success expect(response.location).to eq(assigns(:build_artifact).log_file.url) end end end ================================================ FILE: spec/controllers/build_attempts_controller_spec.rb ================================================ require 'spec_helper' describe BuildAttemptsController do describe "#start" do it "should set the start time and state of the build attempt" do build_attempt = 
FactoryBot.create(:build_attempt)
      expect(build_attempt.state).to eq('runnable')
      expect(build_attempt.started_at).to be_nil
      expect(build_attempt.builder).to be_nil
      post :start, params: { :id => build_attempt.to_param, :builder => "build01", :format => :json }
      expect(response).to be_success
      build_attempt.reload
      expect(build_attempt.state).to eq('running')
      expect(build_attempt.started_at).not_to be_nil
      expect(build_attempt.builder).to eq("build01")
      # No :logstreamer_port param given, so the port stays unset.
      expect(build_attempt.log_streamer_port).to be_nil
    end

    it "should set log streamer port if provided" do
      build_attempt = FactoryBot.create(:build_attempt)
      expect(build_attempt.state).to eq('runnable')
      expect(build_attempt.started_at).to be_nil
      expect(build_attempt.builder).to be_nil
      expect(build_attempt.log_streamer_port).to be_nil
      post :start, params: { :id => build_attempt.to_param, :builder => "build01", :logstreamer_port => 10000, :format => :json }
      expect(response).to be_success
      build_attempt.reload
      expect(build_attempt.state).to eq('running')
      expect(build_attempt.started_at).not_to be_nil
      expect(build_attempt.builder).to eq("build01")
      expect(build_attempt.log_streamer_port).to eq(10000)
    end

    it "should return aborted if the build_attempt is aborted" do
      build_attempt = FactoryBot.create(:build_attempt, :state => 'aborted')
      post :start, params: { :id => build_attempt.to_param, :builder => "build01", :format => :json }
      expect(response).to be_success
      # The JSON body tells the worker the attempt was aborted; the record's
      # state is not overwritten to 'running'.
      expect(JSON.parse(response.body)["build_attempt"]["state"]).to eq("aborted")
      expect(build_attempt.reload.state).to eq('aborted')
    end
  end

  describe "#finish" do
    it "should set the finish time and state of the build attempt" do
      build_attempt = FactoryBot.create(:build_attempt)
      expect(build_attempt.state).to eq('runnable')
      expect(build_attempt.finished_at).to be_nil
      post :finish, params: { :id => build_attempt.to_param, :state => "passed", :format => :json }
      expect(response).to be_success
      build_attempt.reload
      expect(build_attempt.state).to eq('passed')
      expect(build_attempt.finished_at).not_to be_nil
    end

    it "should return errors when the build_attempt fails to start" do
      build_attempt = FactoryBot.create(:build_attempt)
      post :finish, params: { :id => build_attempt.to_param, :state => "invalid-state", :format => :json }
      # Unprocessable entity with validation errors keyed by attribute.
      expect(response.code).to eq("422")
      expect(JSON.parse(response.body)['state']).not_to be_blank
    end

    it "should redirect to the build_part's URL for HTML requests" do
      build_attempt = FactoryBot.create(:build_attempt)
      post :finish, params: { :id => build_attempt.to_param, :state => "aborted", :format => :html }
      expect(response.code).to eq("302")
      build_attempt.reload
      expect(build_attempt.state).to eq('aborted')
    end
  end

  describe "#stream_logs" do
    it "should return 404 for build attempt that doesn't have log streaming port" do
      build_attempt = FactoryBot.create(:build_attempt, :log_streamer_port => nil)
      get :stream_logs, params: { :id => build_attempt.to_param, :format => :html }
      expect(response.code).to eq("404")
    end
  end

  # #stream_logs_chunk proxies a chunk of log output from the worker's
  # logstreamer process; it needs both a builder host and a port.
  describe "#stream_logs_chunk" do
    it "should return error for build attempt that doesn't have log streaming port" do
      build_attempt = FactoryBot.create(:build_attempt, :log_streamer_port => nil)
      get :stream_logs_chunk, params: { :id => build_attempt.to_param, :format => :json }
      expect(response.code).to eq("500")
      expect(JSON.parse(response.body)['error']).to eq("No log streaming available for this build attempt")
    end

    it "should return error for build attempt that doesn't have builder" do
      build_attempt = FactoryBot.create(:build_attempt, :log_streamer_port => 10000, :builder => nil)
      get :stream_logs_chunk, params: { :id => build_attempt.to_param, :format => :json }
      expect(response.code).to eq("500")
      expect(JSON.parse(response.body)['error']).to eq("No log streaming available for this build attempt")
    end

    context "logstreamer not successful" do
      before do
        stub_request(:get, "http://worker.example.com:10000/build_attempts/100/log/stdout.log?maxBytes=250000&start=0").to_return(:status => 500, :body
=> "{}", :headers => {}) end it "should return error when logstreamer errors" do build_attempt = FactoryBot.create(:build_attempt, :log_streamer_port => 10000, :builder => 'worker.example.com', :id => 100) get :stream_logs_chunk, params: { :id => build_attempt.to_param, :format => :json } expect(response.code).to eq("500") expect(JSON.parse(response.body)['error']).to eq("unable to reach log streamer") end end context "logstreamer successful" do let(:logstreamer_body) { '{"Start" : 0, "Contents" : "This is a test\n", "BytesRead": 15, "LogName": "stdout.log"}' } before do stub_request(:get, "http://worker.example.com:10000/build_attempts/100/log/stdout.log?maxBytes=250000&start=0").to_return(:status => 200, :body => logstreamer_body, :headers => {}) end it "should proxy request from logstreamer and add build attempt state" do build_attempt = FactoryBot.create(:build_attempt, :log_streamer_port => 10000, :builder => 'worker.example.com', :id => 100, state: 'running') get :stream_logs_chunk, params: { :id => build_attempt.to_param, :format => :json } expect(response.code).to eq("200") response_hash = JSON.parse(response.body) logstreamer_hash = JSON.parse(logstreamer_body) expect(response_hash.merge(logstreamer_hash)).to eq(response_hash) # check that response_hash includes all attributes from logstreamer_hash expect(response_hash['state']).to eq("running") end end end end ================================================ FILE: spec/controllers/build_parts_controller_spec.rb ================================================ require 'spec_helper' describe BuildPartsController do render_views let(:build) { FactoryBot.create(:build) } let(:repository) { build.repository } let(:build_part) { build.build_parts.create!(:paths => ["a"], :kind => "test", :queue => 'ci') } describe "#show" do it "renders the show template successfully even if elapsed time is nil" do expect(build_part.elapsed_time).to eq(nil) get :show, params: { repository_path: repository, build_id: build, id: 
build_part } expect(response).to be_success expect(response).to render_template("build_parts/show") end it "renders JSON if requested" do build_attempt = FactoryBot.create(:build_attempt, build_part: build_part) FactoryBot.create(:build_artifact, build_attempt: build_attempt) get :show, params: { repository_path: repository, build_id: build, id: build_part, format: :json } ret = JSON.parse(response.body) expect(ret['build_part']['build_attempts'].length).to eq(1) expect(ret['build_part']['build_attempts'][0]['files'].length).to eq(1) expect(ret['build_part']['build_attempts'][0]['files'][0]['build_artifact']['build_attempt_id']).to eq(build_attempt.id) end context "when the repository is disabled" do let(:build2) { FactoryBot.create(:build_on_disabled_repo) } let(:build_part2) { FactoryBot.create(:build_part, build_instance: build2) } it "should not show Rebuild button" do get :show, params: { repository_path: build2.repository, build_id: build2, id: build_part2 } expect(response.body).to_not match(/class="rebuild button"/) end end context "when the repository is enabled" do let(:build2) { FactoryBot.create(:build) } let(:build_part2) { FactoryBot.create(:build_part, build_instance: build2) } it "should show Rebuild button" do get :show, params: { repository_path: build2.repository, build_id: build2, id: build_part2 } expect(response.body).to match(/class="rebuild button"/) end end end describe "#rebuild" do subject { get :rebuild, params: { repository_path: repository, build_id: build, id: build_part } } it "should redirect to the right place" do allow_any_instance_of(Build).to receive(:test_command).and_return("echo just chill") subject expect(response).to redirect_to(repository_build_path(build.repository, build)) end context "the requested commit SHA no longer exists" do before do allow_any_instance_of(Build).to receive(:test_command).and_raise(GitRepo::RefNotFoundError) end it "should not create a new build attempt" do build_part # trigger creation of the db 
records expect { subject }.to_not change { build_part.build_attempts.count } end it "should display a flash error" do subject expect(flash[:error]).to be_present end end end describe '#refresh_build_part_info' do it "returns partials for build_attempts" do build_attempt = FactoryBot.create(:build_attempt, build_part: build_part) FactoryBot.create(:build_artifact, build_attempt: build_attempt) get :refresh_build_part_info, params: { repository_path: repository, build_id: build, id: build_part, format: :json } res = JSON.parse(response.body) expect(res.first['state']).to eq(build_attempt.state) expect(res.first['content']).to include("/build_attempts/#{build_attempt.id}/") end end end ================================================ FILE: spec/controllers/builds_controller_spec.rb ================================================ require 'spec_helper' describe BuildsController do describe "#create" do let(:action) { :create } let(:repo) { FactoryBot.create(:repository) } let!(:branch) { FactoryBot.create(:branch, repository: repo, name: 'gummy-bears') } let(:git_sha) { '30b111147d9a245468c6650f54de5c16584bc154' } let(:params) { { repo_url: repo.url, git_branch: branch.name, git_sha: git_sha, } } it "should return a 404 if the repo does not exist" do repo.destroy post action, params: params expect(response.code).to eq("404") end it "should create a branch record if it does not exist" do branch_name = branch.name branch.destroy expect { post action, params: params }.to change { Branch.exists?(name: branch_name) }.from(false).to(true) end it "should create a new build" do expect { post action, params: params }.to change { Build.exists?(ref: git_sha) }.from(false).to(true) end it "sets merge_on_success when param given" do post action, params: params.merge(merge_on_success: "1") new_build = Build.where(ref: git_sha).first expect(new_build.merge_on_success).to be(true) end it "defaults merge_on_success to false when param not given" do 
expect(params.key?(:merge_on_success)).to be(false) post action, params: params new_build = Build.where(ref: git_sha).first expect(new_build.merge_on_success).to be(false) end it "should return the build info page in the location header" do post action, params: params new_build = Build.where(ref: git_sha).first expect(new_build).to be_present expect(response.location).to eq(repository_build_url(repo, new_build)) end context "a specific git_sha is not specified" do let(:params) { { repo_url: repo.url, git_branch: branch.name, } } before do mocked_remote_server = RemoteServer.for_url(repo.url) allow(mocked_remote_server).to receive(:sha_for_branch).with(branch.name).and_return(to_40("2")) allow(RemoteServer).to receive(:for_url).with(repo.url).and_return(mocked_remote_server) end it "should create a build for the HEAD commit on the given branch" do expect { post action, params: params }.to change { Build.exists?(ref: to_40("2"), branch_record: branch) }.from(false).to(true) end end context "when the pushed sha has already been built" do it "has no effect" do branch = FactoryBot.create(:branch, repository: repo, name: 'other-branch') build = FactoryBot.create(:build, branch_record: branch, ref: git_sha) expect { post action, params: params expect(response).to be_success }.to_not change(Build, :count) expect(response.headers["Location"]).to eq(repository_build_url(repo, build)) end end context "when the sha is already associated with another branch under this repo" do it "should return a URL to the existing build" do other_branch = FactoryBot.create(:branch, repository: repo) other_build = FactoryBot.create(:build, branch_record: other_branch, ref: git_sha) branch # ensure the 'let' gets invoked post action, params: params expect(response).to be_success expect(response.headers["Location"]).to eq(repository_build_url(repo, other_build)) end end context "when the sha is already used by a different repo" do it "should create a new build" do other_repo = 
FactoryBot.create(:repository) other_branch = FactoryBot.create(:branch, repository: other_repo) other_build = FactoryBot.create(:build, branch_record: other_branch, ref: git_sha) expect { post action, params: params expect(response).to be_success }.to change(Build, :count).by(1) expect(response.headers["Location"]).to_not eq(repository_build_url(other_repo, other_build)) end end it "should allow the repository url to be in an alternate format" do expect(repo).to_not be_new_record post action, params: params.merge(repo_url: "https://github.com/#{repo.namespace}/#{repo.name}.git") expect(response).to be_success end end describe "#show" do it "should return a valid JSON" do branch = FactoryBot.create(:branch, name: 'gummy-bears') build = FactoryBot.create(:build, branch_record: branch, :test_command => "script/ci") build_part = FactoryBot.create(:build_part, build_instance: build) FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed') get :show, params: { repository_path: branch.repository, id: build.id, format: :json } ret = JSON.parse(response.body) expect(ret['build']['build_parts'].length).to eq(1) expect(ret['build']['build_parts'][0]['build_id']).to eq(build.id) expect(ret['build']['build_parts'][0]['status']).to eq('passed') end context "when the repository is disabled" do render_views let(:build) { FactoryBot.create(:build_on_disabled_repo, state: 'failed') } it "should not show 'Rebuild failed parts' button or rebuild action in #build-summary table" do build_part = FactoryBot.create(:build_part, build_instance: build) FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed') get :show, params: { repository_path: build.repository, id: build.id } expect(response.body).to_not match(/Rebuild<\/a>$}) end it "should not show 'Retry Partitioning' button" do build.build_parts.delete_all get :show, params: { repository_path: build.repository, id: build.id } expect(response.body).to_not match(/Rebuild<\/a>$}) end it 
"should show 'Retry Partitioning' button" do build.build_parts.delete_all get :show, params: { repository_path: build.repository.to_param, id: build.id } expect(response.body).to match(/ build.id, :repository_path => build.repository} end context "has on_success_log_file" do before do output = "Exited with status: 0" script_log = FilelessIO.new(output) script_log.original_filename = "on_success_script.log" build.on_success_script_log_file = script_log build.save end it "displays link to on_success_log_file" do get @action, params: @params doc = Nokogiri::HTML(response.body) elements = doc.search("[text()*='on_success_script.log']") expect(elements.size).to eq(1) end end context "does not have on_success_log_file" do it "does not display link to on_success_log_file" do get @action, params: @params doc = Nokogiri::HTML(response.body) elements = doc.search("[text()*='on_success_script.log']") expect(elements.size).to eq(0) end end end describe "#toggle_merge_on_success" do before do @build = FactoryBot.create(:build, :merge_on_success => true) end it "aborts merge_on_success" do post :toggle_merge_on_success, params: { id: @build.to_param, repository_path: @build.repository.to_param, merge_on_success: false } expect(response).to redirect_to(repository_build_path(@build.repository, @build)) expect(@build.reload.merge_on_success).to be false end it "enables merge_on_success" do @build.update_attributes(:merge_on_success => false) post :toggle_merge_on_success, params: { id: @build.to_param, repository_path: @build.repository.to_param, merge_on_success: true } expect(response).to redirect_to(repository_build_path(@build.repository, @build)) expect(@build.reload.merge_on_success).to be true end end describe "merge_on_success checkbox" do render_views let(:build) { FactoryBot.create(:build) } before do @action = :show @params = {:id => build.id, :repository_path => build.repository} end it "renders the merge_on_success checkbox" do get @action, params: @params doc = 
Nokogiri::HTML(response.body) elements = doc.css("input[name=merge_on_success]") expect(elements.size).to eq(1) expect(elements.first['checked']).to be_blank end context "for builds with merge_on_success enabled" do let(:build) { FactoryBot.create(:build, merge_on_success: true) } it "renders the merge_on_success checkbox" do get @action, params: @params doc = Nokogiri::HTML(response.body) elements = doc.css("input[name=merge_on_success]") expect(elements.size).to eq(1) expect(elements.first['checked']).to be_present end end context "for builds on a convergence branch" do let(:build) { FactoryBot.create(:convergence_branch_build) } it "renders the merge_on_success checkbox disabled" do get @action, params: @params doc = Nokogiri::HTML(response.body) elements = doc.css("input[name=merge_on_success]") expect(elements.size).to eq(1) expect(elements.first['disabled']).to be_present end end end describe "#rebuild_failed_parts" do let(:build) { FactoryBot.create(:build) } let(:parts) { (1..4).map { FactoryBot.create(:build_part, :build_instance => build) } } before do allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil) end subject { post :rebuild_failed_parts, params: { repository_path: build.repository.to_param, id: build.id } } context "happy path" do before do @attempt_1 = FactoryBot.create(:build_attempt, :build_part => parts[0], :state => 'failed') @attempt_2 = FactoryBot.create(:build_attempt, :build_part => parts[1], :state => 'failed') @attempt_3 = FactoryBot.create(:build_attempt, :build_part => parts[1], :state => 'errored') @attempt_4 = FactoryBot.create(:build_attempt, :build_part => parts[2], :state => 'passed') @attempt_5 = FactoryBot.create(:build_attempt, :build_part => parts[3], :state => 'aborted') end it "rebuilds all failed attempts" do expect(build.build_parts.failed_errored_or_aborted.count).to eq(3) subject expect(build.reload.build_parts.failed.count).to be_zero expect(build.build_attempts.count).to eq(5 + 3) end it "only enqueues one 
build attempt for each failed build part" do subject expect(parts[0].reload.build_attempts.count).to eq(2) expect(parts[1].reload.build_attempts.count).to eq(3) expect(parts[3].reload.build_attempts.count).to eq(2) expect { # repost to test idempotency post :rebuild_failed_parts, params: { repository_path: build.repository.to_param, id: build.id } }.to_not change(BuildAttempt, :count) end end context "an successful prior build attempt should not be rebuilt" do it "does something" do FactoryBot.create(:build_attempt, :build_part => parts[1], :state => 'passed') # attempt 1 FactoryBot.create(:build_attempt, :build_part => parts[1], :state => 'failed') # attempt 2 expect { subject }.to_not change(BuildAttempt, :count) end end end describe "#retry_partitioning" do let!(:build) { FactoryBot.create(:build) } before do allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil) end context "when there are no build parts" do it "enques a partitioning job" do expect(Resque).to receive(:enqueue) post :retry_partitioning, params: { repository_path: build.repository.to_param, id: build.id } expect(response).to redirect_to(repository_build_path(build.repository, build)) end end context "when there are already build parts" do it "does nothing" do expect(Resque).to_not receive(:enqueue) FactoryBot.create(:build_part, build_instance: build) post :retry_partitioning, params: { repository_path: build.repository.to_param, id: build.id } expect(response).to redirect_to(repository_build_path(build.repository, build)) end end context "when the build's repository is disabled" do it "should not partition build" do build2 = FactoryBot.create(:build_on_disabled_repo) expect(Resque).to_not receive(:enqueue) post :retry_partitioning, params: { repository_path: build2.repository.to_param, id: build2.id } expect(response).to redirect_to(repository_build_path(build2.repository, build2)) end end end describe "#build_redirect" do it "should redirect to the full build show url" do build = 
FactoryBot.create(:build)
      get :build_redirect, params: { id: build.id }
      expect(response).to redirect_to(repository_build_path(build.repository, build))
    end
  end

  describe "#build_ref_redirect" do
    it "should redirect to the build show url that matches the ref given" do
      build = FactoryBot.create(:build)
      # Only an 8-character SHA prefix is supplied; the redirect still resolves.
      get :build_ref_redirect, params: { ref: build.ref[0, 8] }
      expect(response).to redirect_to(repository_build_path(build.repository, build))
    end
  end
end
================================================
FILE: spec/controllers/dashboards_controller_spec.rb
================================================
require 'spec_helper'

describe DashboardsController do
  describe "#build_history_by_worker" do
    it "should render worker health page" do
      get :build_history_by_worker
      expect(response).to be_success
    end
  end
end
================================================
FILE: spec/controllers/pull_requests_controller_spec.rb
================================================
require 'spec_helper'

describe PullRequestsController do
  describe "#build" do
    # Shared examples parameterized by git server type ("github" or "stash").
    # The before block aliases the matching *_push_payload /
    # *_pull_request_payload helper (defined at the bottom of this file) so
    # the examples can stay server-agnostic.
    shared_examples "common behavior" do |git_server_type|
      before do
        class_eval do
          alias_method :push_payload, "#{git_server_type}_push_payload".to_sym
          alias_method :pull_request_payload, "#{git_server_type}_pull_request_payload".to_sym
        end
      end

      let!(:repository) do
        FactoryBot.create(:repository, { url: "git@#{git_server_type}.com:square/web.git" }.merge(repository_fields))
      end
      let(:repository_fields) { {} } # expected to be overwritten by a sub-context

      context "when push requests come" do
        let(:repository_fields) { { run_ci: true } }
        let!(:master_branch) { FactoryBot.create(:master_branch, repository: repository) }
        let!(:convergence_branch) { FactoryBot.create(:branch, repository: repository, convergence: true, name: "convergence_branch") }

        it "creates a build" do
          expect(Build.where(branch_id: master_branch.id, ref: to_40('2'))).to_not exist
          expect {
            post :build, params: push_payload
            expect(response).to be_success
          }.to change(Build, :count).by(1)
expect(Build.where(branch_id: master_branch.id, ref: to_40('2'))).to exist end it "does not create a build for pushes to non-convergence branches" do expect { post :build, params: push_payload("ref" => "refs/heads/some-branch") expect(response).to be_success }.to_not change(Build, :count) end it "does not create a build if repository has ci disabled" do repository.update_attributes!(:run_ci => false) expect { post :build, params: push_payload expect(response).to be_success }.to_not change(Build, :count) end it "does not build if there is already a ci build in progress" do master_branch.builds.create!(:ref => to_40('w'), :state => 'succeeded') frozen_time = 3.seconds.from_now allow(Time).to receive(:now).and_return(frozen_time) master_branch.builds.create!(:ref => to_40('y'), :state => 'partitioning') expect { post :build, params: push_payload expect(response).to be_success }.to_not change(Build, :count) end it "builds for convergence branches" do expect { post :build, params: push_payload("ref" => "refs/heads/convergence_branch") expect(response).to be_success }.to change(Build, :count).by(1) end it "builds if there is completed ci build" do master_branch.builds.create!(:ref => to_40('w'), :state => 'succeeded') expect { post :build, params: push_payload expect(response).to be_success }.to change(Build, :count).by(1) end it "builds if there is a completed ci build after a build that is still building" do master_branch.builds.create!(:ref => to_40('w'), :state => 'partitioning') frozen_time = 3.seconds.from_now allow(Time).to receive(:now).and_return(frozen_time) master_branch.builds.create!(:ref => to_40('y'), :state => 'succeeded') expect { post :build, params: push_payload expect(response).to be_success }.to change(Build, :count).by(1) end it "it should not error if the repository url in the request is not found" do key = git_server_type == "github" ? 
"ssh_url" : "url" repository_overrides = { key => "git@git.#{git_server_type}.com:doesnot/exist.git", "host" => nil, "key" => nil, "slug" => nil, "name" => nil, "full_name" => nil } expect { post :build, params: push_payload("repository" => repository_overrides) expect(response).to be_success }.to_not change(Build, :count) end end context "for pull requests" do let(:repository_fields) { { run_ci: true, build_pull_requests: true } } context "when there is no existing Branch record" do it "should create a Branch record on demand" do expect(Branch.where(repository: repository, name: "branch-name")).to_not exist post :build, params: pull_request_payload expect(response).to be_success expect(Branch.where(repository: repository, name: "branch-name")).to exist end end context "when the pull request sha has already been built" do before do @github_payload = pull_request_payload( "pull_request" => { "head" => {"sha" => "de8251ff97ee194a289832576287d6f8ad74e3d0", "ref" => "branch-name"}, "body" => "best pull request ever", }) end let!(:branch) { FactoryBot.create(:branch, repository: repository, name: "branch-name") } it "has no effect" do FactoryBot.create(:build, branch_record: branch, branch_id: branch.id, ref: "de8251ff97ee194a289832576287d6f8ad74e3d0") expect { post :build, params: @github_payload expect(response).to be_success }.to_not change(Build, :count) end it "still creates a build if the sha is used by a different repo" do common_sha = "de8251ff97ee194a289832576287d6f8ad74e3d0" # create build with the same ref on a different repository repo2 = FactoryBot.create(:repository, url: "git@git.#{git_server_type}.com:square/other-repo.git") repo2_branch = FactoryBot.create(:branch, repository: repo2) FactoryBot.create(:build, branch_record: repo2_branch, ref: common_sha) expect(Build.where(branch_id: branch.id, ref: common_sha)).to_not exist post :build, params: @github_payload expect(Build.where(branch_id: branch.id, ref: common_sha)).to exist end end context do 
let(:branch) { FactoryBot.create(:branch, repository: repository, name: "branch-name") } it "creates a build for a pull request" do expect(Build.where(branch_id: branch.id, ref: to_40('1'))).to_not exist expect { post :build, params: pull_request_payload expect(response).to be_success }.to change(Build, :count).by(1) expect(Build.where(branch_id: branch.id, ref: to_40('1'))).to exist end it "does not create a build if build_pull_requests is disabled" do repository.update_attribute(:build_pull_requests, false) expect { post :build, params: pull_request_payload({"pull_request" => {"body" => "don't build it"}}) expect(response).to be_success }.to_not change(branch.builds, :count) end it "does not build a closed pull request" do closed_action = git_server_type == "github" ? {"pull_request" => {"state" => "closed"}} : {"action" => "closed"} expect { post :build, params: pull_request_payload(closed_action) expect(response).to be_success }.to_not change(branch.builds, :count) end it "does not blow up if action is missing" do post :build, params: pull_request_payload({"action" => nil}) expect(response).to be_success end context "when there are other builds for the same branch" do let!(:build_one) { FactoryBot.create(:build, :branch_record => branch, :ref => to_40('w')) } let!(:build_two) { FactoryBot.create(:build, :branch_record => branch, :ref => to_40('y')) } it "aborts previous builds of the same branch" do github_payload = pull_request_payload( "pull_request" => { "head" => {"sha" => to_40('z'), "ref" => branch.name}, "body" => "best pull request ever", }) expect { post :build, params: github_payload expect(response).to be_success }.to change(Build, :count) expect(build_one.reload).to be_aborted expect(build_two.reload).to be_aborted end end end end it "does not blow up if pull_request data is missing" do expect { post :build, params: pull_request_payload({"pull_request" => nil}) expect(response).to be_success }.to_not change(Build, :count) end it "should not error if 
the repository url in the request is not found" do expect { pr_payload = pull_request_payload("repository" => { "ssh_url" => "git@git.#{git_server_type}.com:doesnot/exist.git" }) post :build, params: pr_payload expect(response).to be_success }.to_not change(Build, :count) end end context "from github" do before do settings = SettingsAccessor.new(<<-YAML) git_servers: github.com: type: github YAML stub_const "Settings", settings end include_examples "common behavior", "github" end context "from stash" do before do settings = SettingsAccessor.new(<<-YAML) git_servers: stash.com: type: stash YAML stub_const "Settings", settings end include_examples "common behavior", "stash" end end def github_push_payload(options = {}) { "ref" => "refs/heads/master", "repository" => { "name" => "web", "full_name" => "square/web", "ssh_url" => "git@github.com:square/web.git", }, "head_commit" => { "id" => to_40('2') } }.deep_merge(options) end def github_pull_request_payload(options = {}) { "action" => "opened", "pull_request" => { "state" => "open", "head" => { "ref" => "branch-name", "sha" => to_40('1') } }, "repository" => { "name" => "web", "full_name" => "square/web", "owner" => { "login" => "square" }, "ssh_url" => "git@github.com:square/web.git" } }.deep_merge(options) end def stash_push_payload(options = {}) { "payload" => { "after" => to_40('2'), "repository" => { "id" => "252", "url" => "https://stash.com/scm/square/web.git", "key" => "square", "slug" => "web", "name" => "web", }, "host" => "stash.com", "ref" => "refs/heads/master", }.deep_merge(options).to_json } end def stash_pull_request_payload(options = {}) { "payload" => { "pull_request" => { "head" => { "sha" => to_40('1'), "ref" => "refs/heads/branch-name", "to_ref" => "refs/heads/master" }, "body" => "best pull request ever", "title" => "this is a pull request", }, "repository" => { "id" => "252", "url" => "https://stash.com/scm/square/web.git", "key" => "square", "slug" => "web", "name" => "web", }, "host" => 
"stash.com", "action" => "synchronize", "type" => "pr" }.deep_merge(options).to_json } end end ================================================ FILE: spec/controllers/repositories_controller_spec.rb ================================================ require 'spec_helper' describe RepositoriesController do render_views describe "create action" do before do @params = { repository: { url: "git@git.example.com:square/kochiku.git", test_command: "script/something" }, convergence_branches: "", } end it "should perform a basic create" do expect{ post :create, params: @params expect(response).to be_redirect }.to change(Repository, :count).by(1) repository = Repository.where(url: "git@git.example.com:square/kochiku.git").first expect(repository).to be_present expect(repository.name).to eq('kochiku') end it "sets host, namespace, and name based on the repo url" do post :create, params: @params repository = Repository.where(url: "git@git.example.com:square/kochiku.git").first expect(repository.host).to eq('git.example.com') expect(repository.namespace).to eq('square') expect(repository.name).to eq('kochiku') end it "creates a branch_record for the convergence branches" do post :create, params: @params.merge(convergence_branches: "master, release-1-x") expect(response).to be_redirect expect(Branch.exists?(name: 'master', convergence: true)).to be(true) expect(Branch.exists?(name: 'release-1-x', convergence: true)).to be(true) end context "with validation errors" do it "re-renders form with errors" do # timeout outside of the allowable range @params[:repository][:timeout] = '1000000' post :create, params: @params expect(response).to be_success expect(assigns[:repository].errors.full_messages.join(',')) .to include("The maximum timeout allowed is 1440 minutes") expect(response).to render_template('new') end end end describe "update" do let!(:repository) { FactoryBot.create(:repository, :url => "git@git.example.com:square/kochiku.git") } it "updates existing repository" do expect{ 
patch :update, params: { :id => repository.id, :repository => {:url => "git@git.example.com:square/kochiku-worker.git"} }
        expect(response).to be_redirect
      }.to_not change(Repository, :count)
      repository.reload
      expect(repository.url).to eq("git@git.example.com:square/kochiku-worker.git")
      expect(response).to be_redirect
    end

    context "with invalid data" do
      let(:params) { { timeout: 'abc' } }

      it "re-renders the edit page" do
        patch :update, params: { id: repository.id, repository: params }
        expect(response).to be_success
        expect(response).to render_template('edit')
      end
    end

    # boolean attributes
    [
      :enabled,
      :run_ci,
      :build_pull_requests,
      :send_build_failure_email,
      :send_build_success_email,
      :send_merge_successful_email,
      :allows_kochiku_merges
    ].each do |attribute|
      it "should successfully update the #{attribute} attribute" do
        start_value = repository.send(attribute)
        # Simulate the "0"/"1" strings a checkbox form field submits.
        inverse_value_as_str = start_value ? "0" : "1"
        patch :update, params: { id: repository.id, repository: { attribute => inverse_value_as_str } }
        repository.reload
        expect(repository.send(attribute)).to eq(!start_value)
      end
    end

    # integer attributes
    [:timeout].each do |attribute|
      it "should successfully update the #{attribute} attribute" do
        new_value = rand(1440) # max imposed by repository validation
        patch :update, params: { id: repository.id, repository: { attribute => new_value } }
        repository.reload
        expect(repository.send(attribute)).to eq(new_value)
      end
    end

    # string attributes
    [
      :on_green_update,
    ].each do |attribute|
      it "should successfully update the #{attribute} attribute" do
        new_value = "Keytar Intelligentsia artisan typewriter 3 wolf moon"
        patch :update, params: { id: repository.id, repository: { attribute => new_value } }
        repository.reload
        expect(repository.send(attribute)).to eq(new_value)
      end
    end

    describe "of convergence branches" do
      it "should set convergence on new branches in the list" do
        # branchA already has convergence
        branchA = FactoryBot.create(:branch, repository: repository, name: 'branchA', convergence: true)
        # branchB does not have convergence
        branchB = FactoryBot.create(:branch, repository: repository, name: 'branchB', convergence: false)
        # branchC does not have convergence
        branchC = FactoryBot.create(:branch, repository: repository, name: 'branchC', convergence: false)
        # branchD does not yet exist
        patch :update, params: { id: repository.id, repository: {timeout: 10}, convergence_branches: "branchA,branchB,branchD" }
        expect(branchA.reload).to be_convergence
        expect(branchB.reload).to be_convergence
        expect(branchC.reload).to_not be_convergence
        branchD = repository.branches.where(name: 'branchD').first!
        expect(branchD).to be_convergence
      end

      it "should remove convergence from branches no longer in the list" do
        # branchA has convergence
        branchA = FactoryBot.create(:branch, repository: repository, name: 'branchA', convergence: true)
        # branchB has convergence
        branchB = FactoryBot.create(:branch, repository: repository, name: 'branchB', convergence: true)
        # branchC does not have convergence
        branchC = FactoryBot.create(:branch, repository: repository, name: 'branchC', convergence: false)
        patch :update, params: { id: repository.id, repository: {timeout: 10}, convergence_branches: "branchA" }
        expect(branchA.reload).to be_convergence
        expect(branchB.reload).to_not be_convergence
        expect(branchC.reload).to_not be_convergence
      end
    end
  end

  describe "delete /repositories/:id" do
    let!(:repository) { FactoryBot.create(:repository, :url => "git@git.example.com:square/kochiku.git", :test_command => "script/something") }

    it "responds with success" do
      expect {
        get :destroy, params: { :id => repository.id }
        expect(response).to be_redirect
      }.to change(Repository, :count).by(-1)
    end
  end

  describe "get /repositories" do
    it "responds with success" do
      get :index
      expect(response).to be_success
    end
  end

  describe "get /:namespace/:name/edit" do
    it "responds with success" do
      get :edit, params: { repository_path: FactoryBot.create(:repository).to_param }
      expect(response).to be_success
    end
  end

  describe "get /repositories/new" do
    it "responds with success" do
      get :new
      expect(response).to be_success
    end
  end

  describe "get /dashboard" do
    let(:repository) { FactoryBot.create(:repository) }
    let!(:master_branch) { FactoryBot.create(:master_branch, repository: repository) }
    let!(:non_master_branch) { FactoryBot.create(:branch, :name => 'feature-branch', repository: repository) }

    it "displays the build status of only the master branches" do
      get :dashboard
      expect(response).to be_success
      doc = Nokogiri::HTML(response.body)
      elements = doc.css(".projects .ci-build-info")
      expect(elements.size).to eq(1)
    end
  end

  describe 'post /build-ref' do
    let(:repository) { FactoryBot.create(:repository) }
    let(:fake_sha) { to_40('1') }

    it "creates a master build with query string parameters" do
      post :build_ref, params: { id: repository.id, ref: 'master', sha: fake_sha }
      verify_response_creates_build response, 'master', fake_sha
    end

    it "creates a master build with payload" do
      post :build_ref, params: { id: repository.id, refChanges: [{refId: 'refs/heads/master', toHash: fake_sha}] }
      verify_response_creates_build response, 'master', fake_sha
    end

    it "creates a branch build with query string parameters" do
      post :build_ref, params: { id: repository.id, ref: 'blah', sha: fake_sha }
      verify_response_creates_build response, 'blah', fake_sha
    end

    it "creates a branch build with payload" do
      post :build_ref, params: { id: repository.id, refChanges: [{refId: 'refs/heads/blah', toHash: fake_sha}] }
      verify_response_creates_build response, 'blah', fake_sha
    end

    it "creates a branch build for a branch name with slashes" do
      post :build_ref, params: { id: repository.id, refChanges: [{refId: 'refs/heads/blah/with/a/slash', toHash: fake_sha}] }
      verify_response_creates_build response, 'blah/with/a/slash', fake_sha
    end

    # Shared assertion helper: the JSON response names a persisted build on
    # the expected branch at the expected ref.
    def verify_response_creates_build(response, branch_name, ref)
      expect(response).to be_success
      json = JSON.parse(response.body)
      build_hash = json['builds'][0]
      build =
Build.find(build_hash['id']) expect(build_hash['build_url']).not_to be_nil expect(build.branch_record.name).to eq(branch_name) expect(build.ref).to eq(ref) end context "a convergence branch" do let(:branch) { FactoryBot.create(:convergence_branch, repository: repository) } it "should not abort previous in-progress builds" do earlier_build = FactoryBot.create(:build, state: 'runnable', branch_record: branch) post :build_ref, params: { id: repository.id, ref: branch.name, sha: fake_sha } expect(earlier_build.reload.state).to eq('runnable') end end context "not a convergence branch" do let(:branch) { FactoryBot.create(:branch, repository: repository) } it "should abort all previous in-progress builds" do earlier_build = FactoryBot.create(:build, state: 'runnable', branch_record: branch) post :build_ref, params: { id: repository.id, ref: branch.name, sha: fake_sha } expect(earlier_build.reload.state).to eq('aborted') end end end end ================================================ FILE: spec/controllers/status_controller_spec.rb ================================================ require 'spec_helper' describe StatusController do describe "#available" do context "when the site is available" do it "should return 200" do get :available expect(response.code).to eq("200") end end context "when the site is unavailable" do it "should return 503" do expect(File).to receive(:exist?).and_return(true) get :available expect(response.code).to eq("503") end end end end ================================================ FILE: spec/decorators/branch_decorator_spec.rb ================================================ require 'spec_helper' require 'build' describe BranchDecorator do describe "#most_recent_build_state" do let(:branch) { instance_double("Branch") } let(:decorated_branch) { BranchDecorator.new(branch) } context "when at least one build is present" do before do allow(branch).to receive(:most_recent_build) { instance_double("Build", state: 'running') } end it "returns the state 
of the most recent build" do expect(decorated_branch.most_recent_build_state).to eq('running') end end context "there are no builds for the branch" do before do allow(branch).to receive(:most_recent_build).and_return(nil) end it "returns 'unknown'" do expect(decorated_branch.most_recent_build_state).to eq('unknown') end end end describe "#last_build_duration" do let(:branch) { instance_double("Branch") } let(:decorated_branch) { BranchDecorator.new(branch) } context "with a completed build" do before do allow(branch).to receive(:last_completed_build) { instance_double("Build", state: 'succeeded', elapsed_time: 60) } end it "gets the duration of the last completed build" do expect(decorated_branch.last_build_duration).to eq(60) end end context "without a completed build" do before do allow(branch).to receive(:last_completed_build).and_return(nil) end it "returns nil" do expect(decorated_branch.last_build_duration).to be_nil end end end describe '#build_time_history' do subject { decorated_branch.build_time_history } let(:branch) do proj = instance_double("Branch") allow(proj).to receive(:timing_data_for_recent_builds) { [ @cucumber1 = ["cucumber", "fb25a", 55, 43, 0, 72550, "succeeded", "2014-03-01 22:45:39 UTC"], @jasmine1 = ["jasmine", "fb25a", 2, 0, 0, 72550, "succeeded", "2014-03-01 22:45:39 UTC"], @rubocop1 = ["rubocop", "fb25a", 3, 0, 0, 72550, "succeeded", "2014-03-01 22:45:39 UTC"], @cucumber2 = ["cucumber", "f4235", 55, 44, 0, 72560, "succeeded", "2014-03-02 00:37:55 UTC"], @jasmine2 = ["jasmine", "f4235", 2, 0, 0, 72560, "succeeded", "2014-03-02 00:37:55 UTC"], @rubocop2 = ["rubocop", "f4235", 3, 0, 0, 72560, "succeeded", "2014-03-02 00:37:55 UTC"], ] } proj end let(:decorated_branch) { BranchDecorator.new(branch) } it "should bucket the builds by type" do should == { "cucumber" => [@cucumber1, @cucumber2], "jasmine" => [@jasmine1, @jasmine2], "rubocop" => [@rubocop1, @rubocop2], } end context 'when the branch has never been built' do let(:branch) { 
instance_double("Branch", :timing_data_for_recent_builds => []) } it { should == {} } end context 'when the some build types are missing from builds' do let(:branch) do proj = instance_double("Branch") allow(proj).to receive(:timing_data_for_recent_builds) { [ @pants1 = ["pants", "fb25a", 55, 43, 0, 72550, "succeeded", "2014-03-01 22:45:39 UTC"], @findbugs1 = ["findbugs", "fb25a", 2, 0, 0, 72550, "succeeded", "2014-03-01 22:45:39 UTC"], @pants2 = ["pants", "f4235", 3, 0, 0, 72560, "succeeded", "2014-03-02 00:37:55 UTC"], @findbugs2 = ["findbugs", "f4235", 55, 44, 0, 72560, "succeeded", "2014-03-02 00:37:55 UTC"], @errorprone2 = ["errorprone", "f4235", 2, 0, 0, 72560, "succeeded", "2014-03-02 00:37:55 UTC"], @pants3 = ["pants", "ef570", 3, 0, 0, 72568, "succeeded", "2014-03-02 01:23:50 UTC"], ] } proj end it 'should sort the builds and add empty values for missing build parts' do should == { 'pants' => [@pants1, @pants2, @pants3], 'findbugs' => [@findbugs1, @findbugs2, []], 'errorprone' => [[], @errorprone2, []], } end end end end ================================================ FILE: spec/decorators/build_part_decorator_spec.rb ================================================ require 'spec_helper' describe BuildPartDecorator do describe "#most_recent_stdout_artifact" do let(:artifact) { FactoryBot.create(:build_artifact, :log_file => File.open(FIXTURE_PATH + file)) } let(:build_attempt) { artifact.build_attempt } let(:build_part) { BuildPartDecorator.new(build_attempt.build_part) } subject { build_part.most_recent_stdout_artifact } before do FactoryBot.create(:build_artifact) end context "stdout.log" do let(:file) { "stdout.log" } it { should == artifact } end context "stdout.log.gz" do let(:file) { "stdout.log.gz" } it { should == artifact } end context "not present" do let(:file) { "build_artifact.log" } it { should be_nil } end end end ================================================ FILE: spec/features/integration_spec.rb 
================================================ # coding: utf-8 require "spec_helper" feature "viewing an in process build" do let(:repository) { FactoryBot.create(:repository) } let(:branch) { FactoryBot.create(:master_branch, repository: repository) } let(:build) { FactoryBot.create(:build, branch_record: branch) } let(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :queue => 'ci') } let!(:build_attempt) { FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'runnable') } it "view the current status of the build attempts" do build.update_attribute(:state, 'runnable') visit('/') expect(page).to have_content(repository.name) expect(first(".ci-build-info .state")).to have_content("Runnable") click_link(repository.name) expect(page).to have_content(build.ref[0, 5]) click_link(build.ref[0, 5]) within("table.build-summary") do expect(find("td:nth-child(1)")).to have_content(build_part.id) expect(find("td:nth-child(2)")).to have_content("Runnable") expect(find("td:nth-child(4)")).to have_content("test") click_link(build_part.id.to_s) end expect(find(".subheader")).to have_content("#{build.ref[0, 7]} – #{build_part.kind} (part #{build_part.id})") expect(all(".build-part-info tbody tr").size).to eq(1) end it "should return to the home page when the logo is clicked" do # visit a deep page visit repository_build_part_path(repository, build, build_part) expect(page).to have_content("Runnable on ci queue") click_link("Home") expect(current_path).to eq(root_path) end end feature "a failed build" do before :each do @build_attempt = FactoryBot.create(:build_attempt, :state => 'failed') @build_part = @build_attempt.build_part allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil) end it "can be rebuilt" do build_part_page = repository_build_part_path(@build_part.build_instance.repository, @build_part.build_instance, @build_part) visit(build_part_page) expect(all(".build-part-info tbody tr").size).to eq(1) click_link("Rebuild") 
visit(build_part_page) expect(all(".build-part-info tbody tr").size).to eq(2) end end feature "requesting a new build of a branch" do before :each do @repository = FactoryBot.create(:repository, url: "git@github.com:square/kochiku.git") @branch_name = "test/branch" @branch = FactoryBot.create(:branch, name: @branch_name, repository: @repository) @branch_head_sha = "4b41fe773057b2f1e2063eb94814d32699a34541" build_ref_info = < 200, :body => build_ref_info) end it "creates a new build if a branch is given" do visit(repository_branch_path(@repository, @branch)) click_button('Build') expect(page).to have_content(@branch_head_sha[0..4]) expect(find(".flash.message")).to have_content("New build started for 4b41fe773057b2f1e2063eb94814d32699a34541 on test/branch") end end ================================================ FILE: spec/fixtures/build_artifact.log ================================================ Stuff happened. It was awesome. ================================================ FILE: spec/fixtures/sample_github_webhook_payload.json ================================================ { "before": "5aef35982fb2d34e9d9d4502f6ede1072793222d", "repository": { "url": "http://github.com/defunkt/github", "name": "github", "description": "You're lookin' at it.", "watchers": 5, "forks": 2, "private": 1, "owner": { "email": "chris@ozmm.org", "name": "defunkt" } }, "commits": [ { "id": "41a212ee83ca127e3c8cf465891ab7216a705f59", "url": "http://github.com/defunkt/github/commit/41a212ee83ca127e3c8cf465891ab7216a705f59", "author": { "email": "chris@ozmm.org", "name": "Chris Wanstrath" }, "message": "okay i give in", "timestamp": "2008-02-15T14:57:17-08:00", "added": ["filepath.rb"] }, { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", "url": "http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "chris@ozmm.org", "name": "Chris Wanstrath" }, "message": "update pricing a tad", "timestamp": "2008-02-15T14:36:34-08:00" } ], "after": 
"de8251ff97ee194a289832576287d6f8ad74e3d0", "ref": "refs/heads/master" } ================================================ FILE: spec/fixtures/stdout.log ================================================ This is stdout. ================================================ FILE: spec/helpers/application_helper_spec.rb ================================================ require 'spec_helper' describe ApplicationHelper do include ActionView::Helpers include Haml::Helpers let(:repository) { FactoryBot.create(:repository, :url => "git@git.example.com:square/web.git") } let(:branch) { FactoryBot.create(:branch, repository: repository, name: "nomnomnom") } before do settings = SettingsAccessor.new(<<-YAML) git_servers: git.example.com: type: github stash.example.com: type: stash YAML stub_const "Settings", settings end before do @build = Build.new(ref: "SHA1FORCOMMIT", branch_record: branch) end describe "#build_success_in_words" do it "should return success when state = 'succeeded'" do @build.state = 'succeeded' expect(build_success_in_words(@build)).to eq('success') end it "should return failed when state = 'errored'" do @build.state = 'errored' expect(build_success_in_words(@build)).to eq('failed') end it "should return failed when state = 'doomed'" do @build.state = 'doomed' expect(build_success_in_words(@build)).to eq('failed') end it "should return state otherwise" do @build.state = 'partitioning' expect(build_success_in_words(@build)).to eq('partitioning') end end describe "#link_to_commit" do it "should create a link to the github url" do expect(link_to_commit(@build.repository, @build.ref)).to eq(%{
SHA1FOR}) end end describe "#show_link_to_commit" do it "should create a url to github based on config" do expect(show_link_to_commit(@build.repository, @build.ref)).to eq('https://git.example.com/square/web/commit/SHA1FORCOMMIT') end end describe "#show_link_to_compare" do let(:branch_stash) { FactoryBot.create(:branch, repository: repository_stash, name: "okay") } let(:branch_stash_no_greenupdate) { FactoryBot.create(:branch, repository: repository_stash_no_greenupdate, name: "okay") } let(:repository_stash) { FactoryBot.create(:stash_repository, url: "https://stash.example.com/scm/square/web2.git", host: "stash.example.com", namespace: "square", on_green_update: "green,red") } let(:repository_stash_no_greenupdate) { FactoryBot.create(:stash_repository, url: "https://stash.example.com/scm/square/web3.git", host: "stash.example.com", namespace: "square", on_green_update: "") } it "creates a url to github showing the diff between 2 SHAs" do expect(show_link_to_compare(@build, 'SHA1FORCOMMIT', 'SHA2FORCOMMIT')).to eq('https://git.example.com/square/web/compare/SHA1FORCOMMIT...SHA2FORCOMMIT#files_bucket') end it "creates a url to stash showing the diff between master and green branches" do build_stash = Build.new(ref: "SHA1FORCOMMIT", branch_record: branch_stash) expect(show_link_to_compare(build_stash, 'SHA1FORCOMMIT', 'SHA2FORCOMMIT')).to eq('https://stash.example.com/projects/SQUARE/repos/web2/compare/commits?targetBranch=green&sourceBranch=refs%2Fheads%2Fmaster') end it "creates a url to stash showing a comparison with master if no green branch set" do build_stash_no_greenupdate = Build.new(ref: "SHA1FORCOMMIT", branch_record: branch_stash_no_greenupdate) expect(show_link_to_compare(build_stash_no_greenupdate, 'SHA1FORCOMMIT', 'SHA2FORCOMMIT')).to eq('https://stash.example.com/projects/SQUARE/repos/web3/compare/commits?targetBranch=refs%2Fheads%2Fmaster') end end describe "timeago" do it "should generate the correct abbr tag" do timestamp = Time.at(0).utc 
expect(timeago(timestamp)).to eq( %{#{timestamp}} ) # the inner_html is not hardcoded because it is timezone dependent end end end ================================================ FILE: spec/helpers/build_helper_spec.rb ================================================ require 'spec_helper' describe BuildHelper do include ActionView::Helpers include Haml::Helpers let(:build) { FactoryBot.create(:build) } describe "#multiple_ruby_versions?" do context "with a ruby build with multiple ruby versions" do let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :options => options) } let!(:build_part2) { FactoryBot.create(:build_part, :build_instance => build, :options => options2) } let(:options) { {"ruby" => "1.9.3-p194"} } let(:options2) { {"ruby" => "2.0"} } it "returns true" do expect(multiple_ruby_versions?(build)).to equal(true) end end context "with a ruby build with only one ruby version" do let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :options => options) } let(:options) { {"ruby" => "1.9.3-p194"} } it "returns false" do expect(multiple_ruby_versions?(build)).to equal(false) end end context "with a non-ruby build" do let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :options => options) } let(:options) { {} } it "returns false" do expect(multiple_ruby_versions?(build)).to equal(false) end end end context "with a ruby build with multiple ruby versions" do let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :options => options) } let!(:build_part2) { FactoryBot.create(:build_part, :build_instance => build, :options => options2) } let(:options) { {"ruby" => "1.9.3-p194"} } let(:options2) { {"ruby" => "2.0"} } it "returns the ruby version info" do expect(build_metadata_headers(build, true)).to include("Ruby Version") expect(build_metadata_values(build, build_part, true)).to include("1.9.3-p194") end end context "with a build only having one target" do 
let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :paths => ['a']) } it "returns the info" do expect(build_metadata_headers(build, false)).to eq(["Target"]) expect(build_metadata_values(build, build_part, false)).to include("a") end end context "with a build with paths" do let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :paths => ['a', 'b']) } it "returns the info" do expect(build_metadata_headers(build, false)).to include("Paths") metadata_values = build_metadata_values(build, build_part, false).first expect(metadata_values).to start_with(build_part.paths.size.to_s) doc = Nokogiri::HTML(metadata_values) node = doc.at_css('span') expect(node['title']).to eq('a, b') expect(node.inner_html).to eq("(a, b)") end end context "with a build with multiple chunks" do let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :paths => ['a', 'b'], :options => {'total_workers' => 5, 'worker_chunk' => 3}) } it "displays worker chunk in paths" do expect(format_paths(build_part)).to eq("a - Chunk 3 of 5") end end context "a build part with no paths (/dev/null)" do let!(:build_part) { FactoryBot.create(:build_part, build_instance: build, paths: ['/dev/null'], kind: 'lint-check') } it "displays the BuildPart kind instead of /dev/null" do expect(format_paths(build_part)).to eq("lint-check") end end end ================================================ FILE: spec/helpers/project_stats_helper_spec.rb ================================================ require 'spec_helper' describe ProjectStatsHelper do before do @builds = [] end def create_some_builds_with_build_attempts(count) count.times do ba = FactoryBot.create(:build_attempt, :state => 'passed') ba.build_instance.update_state_from_parts! 
@builds << ba.build_instance end end describe 'error_free_pass_rate' do subject { helper.error_free_pass_rate(@builds) } context "when all attempts passed" do before { create_some_builds_with_build_attempts(3) } it { should == '100%' } end context "when some parts failed before passing" do before do create_some_builds_with_build_attempts(3) FactoryBot.create(:build_attempt, :state => 'failed', :build_part => @builds.first.build_parts.first) end it { should == '67%' } end context "when not all parts ever passed" do before do create_some_builds_with_build_attempts(2) @builds.first.update_attributes! :state => 'failed' end it { should == '50%' } end context "when the latest build part is running" do before do create_some_builds_with_build_attempts(3) @builds.last.update_attribute(:state, 'running') end it "should not count running build" do expect(subject).to eq('100%') end end end describe 'eventual_pass_rate' do subject { helper.eventual_pass_rate(@builds) } context "when all attempts passed" do before { create_some_builds_with_build_attempts(2) } it { should == '100%' } end context "when some parts failed before passing" do before do create_some_builds_with_build_attempts(1) failed_first = FactoryBot.create(:build_attempt, :state => 'failed') FactoryBot.create(:build_attempt, :state => 'passed', :build_part => failed_first.build_part) failed_first.build_instance.update_state_from_parts! @builds << failed_first.build_instance end it { should == '100%' } end context "when not all parts ever passed" do before do never_passed = FactoryBot.create(:build_attempt, :state => 'failed') FactoryBot.create(:build_attempt, :state => 'failed', :build_part => never_passed.build_part) never_passed.build_instance.update_state_from_parts! 
@builds << never_passed.build_instance create_some_builds_with_build_attempts(1) end it { should == '50%' } end end describe 'pass_rate_text' do subject { helper.pass_rate_text(number) } context "for a perfect score" do let(:number) { 1.000000 } it { should == '100%' } end context "for a zero score" do let(:number) { 0 } it { should == '0%' } end context "for a middlin' score" do let(:number) { 0.421643 } it { should == '42%' } end end describe 'average_number_of_rebuilds' do subject { helper.average_number_of_rebuilds(@builds) } before do # setup test with successful two builds containing varying build attempts ba = FactoryBot.create(:build_attempt, :state => 'errored') FactoryBot.create(:build_attempt, :state => 'failed', :build_part => ba.build_part) FactoryBot.create(:build_attempt, :state => 'passed', :build_part => ba.build_part) ba.build_instance.update_state_from_parts! @builds << ba.build_instance ba = FactoryBot.create(:build_attempt, :state => 'failed') FactoryBot.create(:build_attempt, :state => 'passed', :build_part => ba.build_part) ba.build_instance.update_state_from_parts! @builds << ba.build_instance end it { should == 1.5 } context 'when there is an unsuccessful build' do before do ba = FactoryBot.create(:build_attempt, :state => 'errored') ba.build_instance.update_state_from_parts! 
@builds << ba.build_instance end it 'should not impact the result' do should == 1.5 end end end describe 'median_elapsed_time' do subject { helper.median_elapsed_time(@builds) } before do 5.times do |i| @builds << build = FactoryBot.create(:build, :state => 'succeeded', :created_at => (10 + 5 * i).minutes.ago) build_part = FactoryBot.create(:build_part, :build_instance => build) FactoryBot.create(:build_attempt, :build_part => build_part, :finished_at => build.created_at + (3 * i).minutes) end end it { should be_within(1).of(6 * 60) } context 'when there is an unsuccessful build' do before do ba = FactoryBot.create(:build_attempt, :state => 'errored') ba.build_instance.update_state_from_parts! @builds << ba.build_instance end it 'should not impact the result' do should be_within(1).of(6 * 60) end end context 'when there is an even number of builds' do before do @builds << build = FactoryBot.create(:build, :state => 'succeeded', :created_at => 45.minutes.ago) build_part = FactoryBot.create(:build_part, :build_instance => build) FactoryBot.create(:build_attempt, :build_part => build_part, :finished_at => build.created_at + 17.minutes) end it 'should average the middle two' do should be_within(1).of((6 + 9) * 30) end end end end ================================================ FILE: spec/jobs/build_partitioning_job_spec.rb ================================================ require 'spec_helper' describe BuildPartitioningJob do describe "#perform" do subject { BuildPartitioningJob.perform(id) } let(:id) { build.id } let(:build) { FactoryBot.create(:build, :state => 'runnable') } before do allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil) end context "with a job runs successfully" do before do allow(GitRepo).to receive(:inside_copy).and_yield allow(Build).to receive(:find).with(id).and_return(build) allow(Partitioner).to receive(:for_build).with(build).and_return(partitioner) allow(partitioner).to receive(:partitions).and_return('PARTITIONS') allow(build).to 
receive(:partition).with('PARTITIONS') end let(:partitioner) { double } it "uses the partitioner to partition the build" do allow(build).to receive(:update_commit_status!) expect(partitioner).to receive(:partitions).and_return('PARTITIONS') expect(build).to receive(:partition).with('PARTITIONS') subject end it "with a pull request marks a build as pending" do expect(GithubRequest).to receive(:post) .with("#{build.repository.base_api_url}/statuses/#{build.ref}", hash_including(:state => 'pending'), anything ) subject end end context "no test_command specified" do before do build.repository.update!(test_command: nil) end it "raises an error and fails build" do expect(GithubRequest).to receive(:post) .with("#{build.repository.base_api_url}/statuses/#{build.ref}", hash_including(:state => 'failure'), anything ) subject build.reload expect(build.error_details[:message]).to include("No test_command") expect(build.build_parts.size).to eq(0) end end context "when a non-retryable error occurs" do error_message = "A name error occurred" before { allow(GitRepo).to receive(:load_kochiku_yml).and_raise(NameError.new(error_message)) } it "should re-raise the error and set the build state to errored" do expect(GithubRequest).to receive(:post) .with("#{build.repository.base_api_url}/statuses/#{build.ref}", hash_including(:state => 'failure'), anything ) expect { subject }.to raise_error(NameError) build.reload expect(build.state).to eq('errored') expect(build.error_details[:message]).to eq(error_message) expect(build.error_details[:backtrace]).not_to be_blank end end context "when a retryable error occurs" do before { allow(GitRepo).to receive(:load_kochiku_yml).and_raise(GitRepo::RefNotFoundError) } it "should re-raise the error and set the build state to waiting for sync" do expect { subject }.to raise_error(GitRepo::RefNotFoundError) expect(build.reload.state).to eq('waiting_for_sync') end end it "should have an on_failure_retry hook that will re-enqueue the job if it it gets a 
git ref not found error" do expect(Resque).to receive(:enqueue_in).with(60, BuildPartitioningJob, id) BuildPartitioningJob.on_failure_retry(GitRepo::RefNotFoundError.new, id) end end end ================================================ FILE: spec/jobs/build_state_update_job_spec.rb ================================================ require 'spec_helper' describe BuildStateUpdateJob do let(:repository) { FactoryBot.create(:repository, url: 'git@github.com:square/test-repo.git') } let(:branch) { FactoryBot.create(:branch, :repository => repository) } let(:build) { FactoryBot.create(:build, :state => 'runnable', :branch_record => branch) } let(:name) { repository.name + "_pull_requests" } let(:current_repo_master) { build.ref } before do build.build_parts.create!(:kind => :spec, :paths => ["foo", "bar"], :queue => 'ci') build.build_parts.create!(:kind => :cucumber, :paths => ["baz"], :queue => 'ci') # TODO: This is terrible, need to fold this feedback back into the design. # We are stubbing methods that are not called from the class under test. 
allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil) allow(GitRepo).to receive(:harmonize_remote_url) allow(GitRepo).to receive(:synchronize_with_remote).and_return(true) allow(GitRepo).to receive(:inside_repo).and_yield mocked_remote_server = RemoteServer.for_url(repository.url) allow(mocked_remote_server).to receive(:sha_for_branch).and_return(current_repo_master) allow(RemoteServer).to receive(:for_url).with(repository.url).and_return(mocked_remote_server) allow(GitBlame).to receive(:last_email_in_branch).and_return("example@email.com") allow(BuildStrategy).to receive(:update_branch) allow(GithubRequest).to receive(:post) end shared_examples "a non promotable state" do it "should not promote the build" do expect(BuildStrategy).not_to receive(:promote_build) BuildStateUpdateJob.perform(build.id) end end describe "#perform" do it "updates github when a build passes" do expect(GithubRequest).to receive(:post) .with(%r|/statuses/#{build.ref}|, hash_including(:state => 'pending'), anything ) BuildStateUpdateJob.perform(build.id) build.build_parts.each do |part| build_attempt = part.build_attempts.create!(:state => 'running') build_attempt.finish!('passed') end expect(GithubRequest).to receive(:post) .with(%r|/statuses/#{build.ref}|, hash_including(:state => 'success'), anything ) BuildStateUpdateJob.perform(build.id) end context "when a build part is still in progress" do it "does not kick off a new build unless finished" do (build.build_parts - [build.build_parts.last]).each do |part| part.build_attempts.create!(state: 'passed') end build.build_parts.last.build_attempts.create!(state: 'running') build.update_state_from_parts! expect { BuildStateUpdateJob.perform(build.id) }.to_not change(branch.builds, :count) end end context "when all parts have passed" do before do build.build_parts.each do |part| part.build_attempts.create!(state: 'passed') end build.update_state_from_parts! 
end describe "checking for a new commit after finish" do subject { BuildStateUpdateJob.perform(build.id) } it "doesn't kick off a new build for non convergence branch" do expect(branch.convergence?).to be(false) expect { subject }.to_not change(branch.builds, :count) end context "with a build on a convergence branch" do let(:branch) { FactoryBot.create(:convergence_branch, repository: repository) } it "should promote the build" do expect(BuildStrategy).to receive(:promote_build).with(build) expect(BuildStrategy).not_to receive(:run_success_script) subject end context "new sha is available" do let(:current_repo_master) { "new-sha-11111111111111111111111111111111" } it "builds when there is a new sha to build" do expect { subject }.to change(branch.builds, :count).by(1) last_build = branch.builds.last expect(last_build.ref).to eq(current_repo_master) end it "kicks off a new build if attempts are running on a part that passed" do build.build_parts.first.create_and_enqueue_new_build_attempt! 
expect { subject }.to change(branch.builds, :count).by(1) last_build = branch.builds.last expect(last_build.ref).to eq(current_repo_master) end it "does not kick off a new build if one is already running" do branch.builds.create!(ref: 'some-other-sha-1111111111111111111111111', state: 'partitioning') expect { subject }.to_not change(branch.builds, :count) end it "does not roll back a build's state" do new_build = branch.builds.create!(ref: current_repo_master, state: 'failed') expect { subject }.to_not change(branch.builds, :count) expect(new_build.reload.state).to eq('failed') end end context "no new sha" do it "does not build" do expect { subject }.to_not change(branch.builds, :count) end end end end it "kochiku should merge the branch if eligible" do build.update!(merge_on_success: true) expect(BuildStrategy).to receive(:merge_ref).with(build) BuildStateUpdateJob.perform(build.id) end end context "when there is a success script" do let(:build) { FactoryBot.create(:build, state: 'succeeded', branch_record: branch) } before do kochiku_yaml_config = { 'on_success_script' => 'echo hip hip hooray' } allow(GitRepo).to receive(:load_kochiku_yml).and_return(kochiku_yaml_config) end it "runs the success script" do expect(BuildStrategy).to receive(:run_success_script) BuildStateUpdateJob.perform(build.id) end context "when the success script has been run" do before do build.on_success_script_log_file = FilelessIO.new("test").tap { |fio| fio.original_filename = "bar.txt" } build.save! 
end it "does not run the success script" do expect(BuildStrategy).to_not receive(:run_success_script) BuildStateUpdateJob.perform(build.id) end end end context "where this is no success script" do let(:build) { FactoryBot.create(:build, state: 'succeeded', branch_record: branch) } before do kochiku_yaml_config = { } allow(GitRepo).to receive(:load_kochiku_yml).and_return(kochiku_yaml_config) end it "does not try to execute a success script" do expect(BuildStrategy).to_not receive(:run_success_script) BuildStateUpdateJob.perform(build.id) end end context "when a part has failed but some are still running" do before do build.build_parts.first.build_attempts.create!(:state => 'failed') build.update_state_from_parts! end it_behaves_like "a non promotable state" end context "when all parts have run and some have failed" do before do (build.build_parts - [build.build_parts.first]).each do |part| ba = part.build_attempts.create!(:state => 'passed') FactoryBot.create(:stdout_build_artifact, build_attempt: ba) end failed_build_attempt = build.build_parts.first.build_attempts.create!(:state => 'failed') FactoryBot.create(:stdout_build_artifact, build_attempt: failed_build_attempt) build.update_state_from_parts! 
end it_behaves_like "a non promotable state" end context "when no parts" do before do build.build_parts.destroy_all end it "should not update the state" do expect { BuildStateUpdateJob.perform(build.id) }.to_not change { build.reload.state } end it_behaves_like "a non promotable state" end end end ================================================ FILE: spec/jobs/enforce_timeouts_job_spec.rb ================================================ require 'spec_helper' describe EnforceTimeoutsJob do let(:repo_timeout) { 10 } # minutes let(:repository) { FactoryBot.create(:repository, :url => 'git@github.com:square/test-repo.git', :timeout => repo_timeout) } let(:branch) { FactoryBot.create(:branch, :repository => repository) } let(:build) { FactoryBot.create(:build, :state => 'runnable', :branch_record => branch) } let(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :kind => :cucumber, :paths => ['baz'], :queue => 'ci') } subject { EnforceTimeoutsJob.perform } before do # Stub needed to test rebuild feature allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil) end it 'should mark timed-out builds as errored' do attempt1 = BuildAttempt.create(build_part_id: build_part.id, started_at: (repo_timeout + 7).minutes.ago, state: 'running', builder: 'test-worker') attempt2 = BuildAttempt.create(build_part_id: build_part.id, started_at: (repo_timeout + 2).minutes.ago, state: 'running', builder: 'test-worker') subject expect(attempt1.reload.state).to eq('errored') expect(attempt2.reload.state).to eq('running') end describe "automatic rebuilds" do before do @overdue_ba = BuildAttempt.create(build_part_id: build_part.id, started_at: (repo_timeout + 7).minutes.ago, state: 'running', builder: 'test-worker') end context "the aborted build attempt is the most recent attempt on the BuildPart" do it "should rebuild" do expect(build_part.build_attempts.last).to eq(@overdue_ba) subject build_part.reload expect(build_part.build_attempts.last).to_not eq(@overdue_ba) 
end end context "the aborted build attempt is not the most recent attempt on the BuildPart" do before do BuildAttempt.create(build_part_id: build_part.id, started_at: 2.minutes.ago, state: 'running', builder: 'test-worker') end it "should not rebuild" do expect { subject }.to_not change { build_part.build_attempts.count } end end end end ================================================ FILE: spec/jobs/poll_repositories_job_spec.rb ================================================ require 'spec_helper' describe PollRepositoriesJob do subject { PollRepositoriesJob.perform } let(:repo) { branch.repository } let!(:branch) { FactoryBot.create(:convergence_branch) } before do allow(described_class).to receive(:sleep).and_return(nil) @fake_remote_server = double(:sha_for_branch => to_40("test_sha")) allow(RemoteServer).to receive(:for_url).with(repo.url).and_return(@fake_remote_server) end it "will build any new commit" do subject expect(Build.exists?(:ref => to_40("test_sha"), :branch_id => branch.id)).to be(true) end it "won't build an old commit" do FactoryBot.create(:build, :branch_record => branch, :ref => to_40("test_sha")) expect { subject }.to_not change{ branch.reload.builds.count } end # this likely means the repo has moved/renamed and the url needs to be # updated or has been deleted from the git server it "disables the repo in Kochiku if the RemoteServer returns a 404" do allow(@fake_remote_server).to receive(:sha_for_branch).and_raise(RemoteServer::RefDoesNotExist) expect { subject }.to change { repo.reload.enabled? 
}.from(true).to(false) end end ================================================ FILE: spec/jobs/timeout_stuck_builds_job_spec.rb ================================================ require 'spec_helper' describe TimeoutStuckBuildsJob do let(:repository) { FactoryBot.create(:repository, url: 'git@github.com:square/test-repo.git', assume_lost_after: 10) } let(:branch) { FactoryBot.create(:branch, :repository => repository) } let(:build) { FactoryBot.create(:build, :state => 'runnable', :branch_record => branch) } subject { TimeoutStuckBuildsJob.perform } describe "#perform" do let(:build_attempt) { build.build_parts.create!(:kind => :spec, :paths => ["foo", "bar"], :queue => 'ci') .build_attempts.create!(:state => 'running') } let(:build_attempt_2) { build.build_parts.create!(:kind => :cucumber, :paths => ["baz"], :queue => 'ci') .build_attempts.create!(:state => 'running') } context "when a repository has assume_lost_after set" do it "should not stop builds that have yet to reach the limit" do subject expect(build_attempt.reload.state).to eq('running') expect(build_attempt_2.reload.state).to eq('running') end it "should stop builds that have reached the limit" do expect(build_attempt.state).to eq('running') build_attempt.update_attributes(started_at: 30.minutes.ago) subject expect(build_attempt.reload.state).to eq('errored') expect(build_attempt_2.reload.state).to eq('running') end end context "when a build attempt was created more then 5 minutes ago" do let(:build_attempt) { build.build_parts.create!(:kind => :cucumber, :paths => ["baz"], :queue => 'ci') .build_attempts.create!(:created_at => 10.minutes.ago, :state => 'runnable', :builder => "test") } it "should stop a build that is not queued" do expect(build_attempt.state).to eq('runnable') subject expect(build_attempt.reload.state).to eq('errored') end end end end ================================================ FILE: spec/lib/build_strategies/production_build_strategy_spec.rb 
================================================ require "spec_helper" # Including the production strategy is potentially dangerous but we stub out command execution. require "#{Rails.root}/lib/build_strategies/production_build_strategy.rb" describe BuildStrategy do let(:branch) { FactoryBot.create(:branch, name: 'funyuns') } let(:build) { FactoryBot.create(:build, branch_record: branch) } before(:each) do CommandStubber.new # ensure Open3 is stubbed expect(Rails.application.config.action_mailer.delivery_method).to eq(:test) end describe "#merge_ref" do before do allow(GitRepo).to receive(:inside_copy).and_yield end context "when auto_merge is enabled" do before do expect(GitBlame).to receive(:emails_in_branch).with(an_instance_of(Build)).and_return("the-committers@example.com") end context "Using a github build" do it "should merge to master" do merger = object_double(GitMergeExecutor.new(build)) expect(GitMergeExecutor).to receive(:new).and_return(merger) expect(merger).to receive(:merge_and_push).and_return(merge_commit: to_40('a'), log_output: "This is not a drill") expect(merger).to receive(:delete_branch) expect { BuildStrategy.merge_ref(build) }.not_to raise_error end it "should handle merge failure" do merger = object_double(GitMergeExecutor.new(build)) expect(GitMergeExecutor).to receive(:new).and_return(merger) expect(merger).to receive(:merge_and_push).and_raise(GitMergeExecutor::GitMergeFailedError) expect(MergeMailer).to receive(:merge_failed).once .and_return(double('mailer', :deliver_now => nil)) expect { BuildStrategy.merge_ref(build) }.to_not raise_error end end end context "Using a stash build" do let(:stash_branch) { FactoryBot.create(:branch, repository: FactoryBot.create(:stash_repository)) } let(:stash_build) { FactoryBot.create(:build, branch_record: stash_branch) } before do settings = SettingsAccessor.new(<<-YAML) sender_email_address: kochiku@example.com kochiku_notifications_email_address: test@example.com git_servers: github.com: type: 
github stash.example.com: type: stash YAML stub_const "Settings", settings end it "should merge to master using stash REST api" do merger = object_double(GitMergeExecutor.new(stash_build)) expect(GitMergeExecutor).to receive(:new).and_return(merger) expect(merger).to receive(:merge_and_push).and_return(merge_commit: to_40('a'), log_output: "This is not a drill") expect(merger).to receive(:delete_branch) expect { BuildStrategy.merge_ref(build) }.not_to raise_error end end end describe "#promote_build" do subject { described_class.promote_build(build) } before do allow(GitRepo).to receive(:inside_repo).and_yield end context "when the ref is an ancestor" do before(:each) { expect(GitRepo).to receive(:included_in_promotion_ref?).and_return(true) } it "does not perform an update" do expect(described_class).to_not receive(:update_branch) subject end end context "when the ref is not an ancestor" do before(:each) { expect(GitRepo).to receive(:included_in_promotion_ref?).and_return(false) } it "should update the promotion branch" do expect(described_class).to receive(:update_branch).with(branch.repository.promotion_refs.first, build.ref) subject end end end describe "#update_branch" do subject { described_class.update_branch('last-green', 'abc123') } it "should promote a sha" do mock_git_command = double expect(mock_git_command).to receive(:run).and_return "" expect(Cocaine::CommandLine).to receive(:new).with("git push", "--force origin abc123:refs/heads/last-green").and_return mock_git_command subject end end describe "#run_success_script" do let(:repository) { branch.repository } subject { described_class.run_success_script(build) } before do allow(GitRepo).to receive(:inside_copy).and_yield expect(build).to receive(:on_success_script).and_return("./this_is_a_triumph") end it "run success script only once" do command = double("Cocaine::CommandLine", :run => "this is some output\n", :exit_status => "255") allow(Cocaine::CommandLine).to receive(:new).and_return(command) 
subject expect(build.reload.on_success_script_log_file.read).to eq("this is some output\n\nExited with status: 255") end end describe "#on_success_command" do let(:repository) { branch.repository } before do allow(GitRepo).to receive(:inside_copy).and_yield expect(build).to receive(:on_success_script).and_return("./this_is_a_triumph") end it "sets GIT_BRANCH and GIT_COMMIT" do command = described_class.on_success_command(build) expect(command).to include("./this_is_a_triumph") expect(command).to include("GIT_BRANCH=") expect(command).to include("GIT_COMMIT=") end end end ================================================ FILE: spec/lib/git_blame_spec.rb ================================================ require 'spec_helper' describe GitBlame do let(:build) { FactoryBot.create(:build) } describe "#emails_since_last_green" do subject { GitBlame.emails_since_last_green(build) } context "with many build breakers, and no git prefix" do before do allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return(["User One:userone@example.com", "User Two:usertwo@example.com"]) end it "returns the emails of the users" do expect(subject).to eq(["userone@example.com", "usertwo@example.com"]) end it "will not return the same user twice" do allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return(["User One:userone@example.com", "User One:userone@example.com"]) expect(subject).to eq(["userone@example.com"]) end end context "with a git prefix" do before do allow(Settings).to receive(:git_pair_email_prefix).and_return("git") end it "should be able to extract a single user" do allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return(["First Last:git+userone@example.com"]) expect(subject).to eq(["userone@example.com"]) end it "should be able to extract multiple users" do allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return(["First Last:git+one+two+three@example.com"]) expect(subject).to 
eq(["one@example.com", "two@example.com", "three@example.com"]) end it "does not affect users with no plus sign" do allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return(["One:one@example.com", "Two:two@foo.example.org"]) expect(subject).to eq(["one@example.com", "two@foo.example.org"]) end it "does not affect an email with a similar format but not starting with the prefix and a plus sign" do allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return(["One:github+one+two@example.com"]) expect(subject).to eq(["github+one+two@example.com"]) end end end describe "#emails_in_branch" do subject { GitBlame.emails_in_branch(build) } after do GitBlame.instance_variable_set(:@people_lookup, nil) end context "with many build breakers" do before do allow(GitBlame).to receive(:git_names_and_emails_in_branch).and_return(["User One:userone@example.com", "User Two:usertwo@example.com"]) end it "returns the emails of the users" do expect(subject).to eq(["userone@example.com", "usertwo@example.com"]) end end context "with deleted branch" do before do allow(GitBlame).to receive(:git_names_and_emails_in_branch).and_call_original allow(GitRepo).to receive(:inside_repo).and_yield allow(GitRepo).to receive(:branch_exist?).and_return(false) end it "should should return []" do expect(subject).to eq([]) end end end describe "#last_email_in_branch" do subject { GitBlame.last_email_in_branch(build) } before do allow(GitBlame).to receive(:last_git_name_and_email_in_branch).and_return("User One:userone@example.com") end it "returns a single email" do expect(subject).to eq(["userone@example.com"]) end end describe "#changes_since_last_green" do subject { GitBlame.changes_since_last_green(build) } before do allow(GitBlame).to receive(:changes_since_last_green).and_call_original end it "should parse the git log message and return a hash of information" do allow(GitRepo).to 
receive(:inside_repo).and_return("::!::817b88be7488cab5e4f9d9975222db80d8bceb3b|User One |Fri Oct 19 17:43:47 2012 -0700|this is my commit message::!::") git_changes = subject expect(git_changes.first[:hash]).to eq("817b88be7488cab5e4f9d9975222db80d8bceb3b") expect(git_changes.first[:author]).to eq("User One ") expect(git_changes.first[:date]).to eq("Fri Oct 19 17:43:47 2012 -0700") expect(git_changes.first[:message]).to eq("this is my commit message") end it "should strip new lines in the commit message" do allow(GitRepo).to receive(:inside_repo).and_return("::!::817b88|User One|Fri Oct 19|this is my commit message\nanother line::!::") git_changes = subject expect(git_changes.first[:message]).to eq("this is my commit message another line") end end describe "#changes_in_branch" do subject { GitBlame.changes_in_branch(build) } before do allow(GitBlame).to receive(:changes_in_branch).and_call_original end it "should parse the git log message and return a hash of information" do allow(GitRepo).to receive(:inside_repo).and_return("::!::817b88be7488cab5e4f9d9975222db80d8bceb3b|User One |Fri Oct 19 17:43:47 2012 -0700|this is my commit message::!::") git_changes = subject expect(git_changes.first[:hash]).to eq("817b88be7488cab5e4f9d9975222db80d8bceb3b") expect(git_changes.first[:author]).to eq("User One ") expect(git_changes.first[:date]).to eq("Fri Oct 19 17:43:47 2012 -0700") expect(git_changes.first[:message]).to eq("this is my commit message") end end describe "#files_changed_since_last_build" do let(:options) { {} } subject { GitBlame.files_changed_since_last_build(build, options) } before do allow(GitBlame).to receive(:files_changed_since_last_build).and_call_original end it "should parse the git log and return change file paths" do allow(GitRepo).to receive(:inside_repo).and_return("::!::User One:userone@example.com::!::\n\npath/one/file.java\npath/two/file.java") git_file_changes = subject expect(git_file_changes.size).to eq(2) expect(git_file_changes).to 
include({:file => "path/one/file.java", :emails => []}) expect(git_file_changes).to include({:file => "path/two/file.java", :emails => []}) end context "includes merge commit modifying files that aren't changed in parent commits" do # [hack] the git-sha's are not stable on test runs, so we need to use git tags to mark commits let(:previous_build) { instance_double('Build', :ref => "a") } before do build.update(:ref => "d") allow(build).to receive(:previous_build).and_return previous_build allow(GitRepo).to receive(:inside_repo) do |build, sync_repo, &block| Dir.mktmpdir do |directory| Dir.chdir(directory) do suppressed_git_init `git config user.email "test@example.com" && git config user.name "test"` FileUtils.touch("TESTFILE") `git add -A && git commit -m "commit 1" && git tag a` `git checkout -q -b branch` FileUtils.touch("TESTFILE2") `git add -A && git commit -m "commit 2" && git tag b` `git checkout -q -` FileUtils.touch("TESTFILE3") `git add -A && git commit -m "commit 3" && git tag c` # --no-commit allows us to change arbitrary files, like in merge conflict resolution `git merge branch --no-ff --no-commit 2> /dev/null` # modify a new file during the merge not modified by parents FileUtils.touch("NEWFILE") `git add -A && git commit -m "merge commit" && git tag d` block.call end end end end it "should include all files modified in merge commit" do git_file_changes = subject expect(git_file_changes).to_not include({:file => "TESTFILE", :emails => []}) expect(git_file_changes).to include({:file => "TESTFILE2", :emails => []}) expect(git_file_changes).to include({:file => "TESTFILE3", :emails => []}) expect(git_file_changes).to include({:file => "NEWFILE", :emails => []}) end end end describe "#files_changed_since_last_green" do let(:options) { {} } subject { GitBlame.files_changed_since_last_green(build, options) } before do allow(GitBlame).to receive(:files_changed_since_last_green).and_call_original end it "should parse the git log and return change file paths" 
do allow(GitRepo).to receive(:inside_repo).and_return("::!::User One:userone@example.com::!::\n\npath/one/file.java\npath/two/file.java") git_file_changes = subject expect(git_file_changes.size).to eq(2) expect(git_file_changes).to include({:file => "path/one/file.java", :emails => []}) expect(git_file_changes).to include({:file => "path/two/file.java", :emails => []}) end context "fetch emails with files changes" do let(:options) { {:fetch_emails => true} } it "should parse the git log and return change file paths with emails" do allow(GitRepo).to receive(:inside_repo).and_return("::!::User One:userone@example.com::!::\n\npath/one/file.java\npath/two/file.java\n::!::User Two:usertwo@example.com::!::\n\npath/three/file.java") git_file_changes = subject expect(git_file_changes.size).to eq(3) expect(git_file_changes).to include({:file => "path/one/file.java", :emails => ["userone@example.com"]}) expect(git_file_changes).to include({:file => "path/two/file.java", :emails => ["userone@example.com"]}) expect(git_file_changes).to include({:file => "path/three/file.java", :emails => ["usertwo@example.com"]}) end it "should return nothing if the line doesn't have an email" do allow(GitRepo).to receive(:inside_repo).and_return("::!::::!::\n") expect(subject).to be_empty end end end describe "#files_changed_in_branch" do let(:options) { {} } subject { GitBlame.files_changed_in_branch(build, options) } before do allow(GitBlame).to receive(:files_changed_in_branch).and_call_original end it "should parse the git log and return change file paths" do allow(GitRepo).to receive(:inside_repo).and_return("::!::User One:userone@example.com::!::\n\npath/one/file.java\npath/two/file.java") git_file_changes = subject expect(git_file_changes.size).to eq(2) expect(git_file_changes).to include({:file => "path/one/file.java", :emails => []}) expect(git_file_changes).to include({:file => "path/two/file.java", :emails => []}) end end describe "#net_files_changed_in_branch" do let(:options) { 
{} } let(:git_merge_base_command) { "git merge-base master #{build.branch_record.name}" } let(:git_merge_base_output) { '12345' } let(:git_diff_command) { "git diff --name-status --find-renames --find-copies '12345..#{build.branch_record.name}'" } let(:git_diff_output) { <<-GITDIFF D path/to/deleted_file.java M path/to/modified_file.java A path/to/added_file.java R097 path/to/original_name.java path/to/new_name.java GITDIFF } subject { GitBlame.net_files_changed_in_branch(build, options) } before do allow(GitBlame).to receive(:net_files_changed_in_branch).and_call_original allow(GitRepo).to receive(:inside_repo).and_yield diff_double = double('git diff') allow(diff_double).to receive(:run).and_return(git_diff_output) allow(Cocaine::CommandLine).to receive(:new).with(git_diff_command) { diff_double } merge_base_double = double('git merge-base') allow(merge_base_double).to receive(:run).and_return(git_merge_base_output) allow(Cocaine::CommandLine).to receive(:new).with(git_merge_base_command) { merge_base_double } end it "should parse the git diff and return change file paths" do git_file_changes = subject expect(git_file_changes.size).to eq(5) expect(git_file_changes).to include({:file => "path/to/added_file.java", :emails => []}) expect(git_file_changes).to include({:file => "path/to/deleted_file.java", :emails => []}) expect(git_file_changes).to include({:file => "path/to/modified_file.java", :emails => []}) expect(git_file_changes).to include({:file => "path/to/original_name.java", :emails => []}) expect(git_file_changes).to include({:file => "path/to/new_name.java", :emails => []}) end end end ================================================ FILE: spec/lib/git_merge_executor_spec.rb ================================================ require 'spec_helper' require 'git_merge_executor' describe GitMergeExecutor do before do settings = SettingsAccessor.new(<<-YAML) git_servers: github.com: type: github stash.example.com: type: stash YAML stub_const "Settings", 
settings end describe "#merge_and_push" do let(:build) { FactoryBot.create(:build) } let(:merger) { described_class.new(build) } subject { merger.merge_and_push } context "when merge succeeds" do before(:each) do @stubber = CommandStubber.new end it "should not raise exceptions" do merge_info = subject expect(merge_info).to have_key(:merge_commit) expect(merge_info).to have_key(:log_output) @stubber.check_cmd_executed("git merge") end end context "when merge fails due to merge conflicts" do before(:each) do @stubber = CommandStubber.new @stubber.stub_capture2e_failure("git merge") end it "should raise an exception" do expect { subject }.to raise_error(described_class::GitMergeFailedError) end end context "when push fails it resets the git repo and tries again" do before(:each) do status_success = double('Process::Status', :success? => true) allow(merger).to receive(:sleep) allow(Open3).to receive(:capture2e).and_return(["", status_success]) end it "should raise an exception" do status_failure = double('Process::Status', :success? 
=> false) expect(Open3).to receive(:capture2e).with(/git push/) .and_return(["", status_failure]) .exactly(3).times expect { subject }.to raise_error(described_class::GitPushFailedError) end end end end ================================================ FILE: spec/lib/git_repo_spec.rb ================================================ require 'spec_helper' describe GitRepo do before do settings = SettingsAccessor.new(<<-YAML) git_servers: git.example.com: type: github YAML stub_const "Settings", settings end describe "#synchronize_with_remote" do it "should throw an exception after the third fetch attempt" do fetch_double = double('git fetch') expect(fetch_double).to receive(:run).exactly(3).times.and_raise(Cocaine::ExitStatusError) allow(Cocaine::CommandLine).to receive(:new).with('git fetch', anything) { fetch_double } expect(GitRepo).to receive(:sleep).exactly(2).times expect { GitRepo.send(:synchronize_with_remote, "master") }.to raise_error(Cocaine::ExitStatusError) end end describe '#inside_repo' do before do FileUtils.rm_rf GitRepo::WORKING_DIR FileUtils.mkdir GitRepo::WORKING_DIR end it 'updates the remote URL of the cached copy if the remote URL has changed' do # Stipulates that the namespace and the name of the repo are still the # same. This is for situations where something else about the url # changed, e.g. switched to a git mirror Dir.mktmpdir do |old_remote| Dir.mktmpdir do |new_remote| Dir.chdir(old_remote) do suppressed_git_init FileUtils.touch("TESTFILE") `git add -A` `git commit -m "Initial commit"` end repository = double('Repository', namespace: 'sq', name: 'fun-with-flags', url: 'push-url', url_for_fetching: old_remote) # Clone the repo first time, prime the cache. 
GitRepo.inside_repo(repository, sync: false) {} # make a copy of the faux remote FileUtils.cp_r("#{old_remote}/.", new_remote, verbose: false) allow(repository).to receive(:url_for_fetching).and_return(new_remote) # retrieve the updated url of the repository updated_remote = nil GitRepo.inside_repo(repository, sync: false) do updated_remote = `git config --get remote.origin.url`.chomp end expect(updated_remote).to eq(new_remote) end end end end describe "#branch_exist?" do before do allow(GitRepo).to receive(:inside_repo) do |_build, _sync_repo, &block| Dir.mktmpdir do |directory| Dir.chdir(directory) do suppressed_git_init `git config user.email "test@example.com" && git config user.name "test"` FileUtils.touch("TESTFILE") `git add -A && git commit -m "commit 1" && git tag a` block.call end end end end it "should return false for nonexisting branch" do GitRepo.inside_repo('repo') do expect(GitRepo.branch_exist?('fake_branch')).to be false end end it "should return true for existing branch" do GitRepo.inside_repo('repo') do expect(GitRepo.branch_exist?('master')).to be true end end end end ================================================ FILE: spec/lib/github_commit_status_spec.rb ================================================ require 'spec_helper' describe GithubCommitStatus do subject { GithubCommitStatus.new(build, oauth_token) } let(:oauth_token) { "my_test_token" } let(:build) { FactoryBot.create(:build) } before do settings = SettingsAccessor.new(<<-YAML) git_servers: git.example.com: type: github github.com: type: github YAML stub_const "Settings", settings end it "marks a build as pending" do build.update_attributes!(:state => 'running') expect(GithubRequest).to receive(:post) .with(%r|/statuses/#{build.ref}|, hash_including(:state => 'pending'), oauth_token ).and_return(commit_status_response) subject.update_commit_status! 
end it "marks a build as success" do build.update_attributes!(:state => 'succeeded') expect(GithubRequest).to receive(:post) .with(%r|/statuses/#{build.ref}|, hash_including(:state => 'success'), oauth_token ).and_return(commit_status_response) subject.update_commit_status! end it "marks a build as failure" do build.update_attributes!(:state => 'failed') expect(GithubRequest).to receive(:post) .with(%r|/statuses/#{build.ref}|, hash_including(:state => 'failure'), oauth_token ).and_return(commit_status_response) subject.update_commit_status! end it "uses a repos github url" do build.branch_record.update_attributes!(:repository => FactoryBot.create(:repository, :url => "git@github.com:square/kochiku-worker.git")) build.update_attributes!(:state => 'failed') build.reload expect(GithubRequest).to receive(:post) .with("https://api.github.com/repos/square/kochiku-worker/statuses/#{build.ref}", anything, anything).and_return(commit_status_response) subject.update_commit_status! end def commit_status_response '{"description":"Build is running","creator":{"gravatar_id":"56fdde43fb3bd6cf62bbec24dc8cb682","login":"nolan","url":"https://git.example.com/api/v3/users/nolan","avatar_url":"https://secure.gravatar.com/avatar/56fdde43fb3bd6cf62bbec24dc8cb682?d=https://git.example.com%2Fimages%2Fgravatars%2Fgravatar-user-420.png","id":41},"updated_at":"2012-10-06T02:59:18Z","created_at":"2012-10-06T02:59:18Z","state":"success","url":"https://git.example.com/api/v3/repos/square/web/statuses/22","target_url":"https://kochiku.example.com/square/web/builds/5510","id":22}' end end ================================================ FILE: spec/lib/github_post_receive_hook_spec.rb ================================================ require 'spec_helper' require 'github_post_receive_hook' describe GithubPostReceiveHook do subject { GithubPostReceiveHook.new(repository, 'github_oauth_token_test') } let(:repository) { FactoryBot.create(:repository, :url => "git@git.example.com:square/web.git") } 
before do settings = SettingsAccessor.new(<<-YAML) git_servers: git.example.com: type: github YAML stub_const "Settings", settings end it "does not recreate the hook if it already exists" do stub_request(:get, "https://git.example.com/api/v3/repos/square/web/hooks") .to_return(:body => github_hooks) subject.subscribe! end it "creates the hook" do stub_request(:get, "https://git.example.com/api/v3/repos/square/web/hooks") .to_return(:body => '[]') stub_request(:post, "https://git.example.com/api/v3/repos/square/web/hooks").with do |request| body = JSON.parse(request.body) expect(body["name"]).to eq("web") expect(body["events"]).to eq(['pull_request']) expect(body["active"]).to eq(true) expect(body["config"]["url"]).to eq("http://localhost:3001/pull-request-builder") true end.to_return(:body => github_hooks) subject.subscribe! end it "updates a repositories github_post_receive_hook_id" do expect(repository.github_post_receive_hook_id).to eq(nil) stub_request(:get, "https://git.example.com/api/v3/repos/square/web/hooks") .to_return(:body => github_hooks) subject.subscribe! expect(repository.github_post_receive_hook_id).to eq(78) end it "updates an existing hook" do repository.update_attributes!(:github_post_receive_hook_id => 78) called = false stub_request(:patch, "https://git.example.com/api/v3/repos/square/web/hooks/78").with do |request| body = JSON.parse(request.body) expect(body["name"]).to eq("web") expect(body["events"]).to eq(['pull_request']) expect(body["active"]).to eq(true) expect(body["config"]["url"]).to eq("http://localhost:3001/pull-request-builder") called = true true end.to_return(:body => github_hooks) subject.subscribe! 
expect(called).to be true end def github_hooks "[{\"active\":true,\"updated_at\":\"2012-10-09T19:02:47Z\",\"last_response\":{\"status\":\"unused\",\"message\":null,\"code\":null},\"events\":[\"pull_request\"],\"created_at\":\"2012-10-09T19:02:47Z\",\"url\":\"https://git.example.com/api/v3/repos/square/kochiku/hooks/78\",\"name\":\"web\",\"config\":{\"url\":\"http://localhost:3001/pull-request-builder\"},\"id\":78}]" end end ================================================ FILE: spec/lib/github_request_spec.rb ================================================ require 'spec_helper' require 'github_request' describe GithubRequest do let(:url) { "https://git.example.com/api/something-or-other" } let(:oauth_token) { "my_test_token" } RSpec.shared_examples "a github api request" do it "should properly include oauth token in header" do stub_request(http_verb, url).with do |request| expect(request.headers["Authorization"]).to eq("token #{oauth_token}") true end subject end it "should specify Github API v3" do stub_request(http_verb, url).with do |request| expect(request.headers["Accept"]).to eq("application/vnd.github.v3+json") true end subject end end RSpec.shared_examples "a modifying github api request" do it "should JSON encode request data" do stub_request(http_verb, url).with do |request| body = JSON.parse(request.body) expect(body).to eq(request_data) true end subject end end describe ".get" do let(:http_verb) { :get } subject { GithubRequest.get(url, oauth_token) } include_examples "a github api request" end describe ".post" do let(:http_verb) { :post } let(:request_data) { {"arg1" => {"arg2" => "value1"}} } subject { GithubRequest.post(url, request_data, oauth_token) } include_examples "a github api request" include_examples "a modifying github api request" end describe ".patch" do let(:http_verb) { :patch } let(:request_data) { {"arg1" => {"arg2" => "value1"}} } subject { GithubRequest.patch(url, request_data, oauth_token) } include_examples "a github api request" 
include_examples "a modifying github api request" end end ================================================ FILE: spec/lib/partitioner/default_spec.rb ================================================ require 'lib/partitioner/shared_default_behavior' describe Partitioner::Default do include_examples "Partitioner::Default behavior", Partitioner::Default end ================================================ FILE: spec/lib/partitioner/dependency_map_spec.rb ================================================ require 'lib/partitioner/shared_default_behavior' describe Partitioner::DependencyMap do include_examples "Partitioner::Default behavior", Partitioner::DependencyMap describe '#partitions' do let(:build) { FactoryBot.create(:build) } let(:partitioner) { Partitioner::DependencyMap.new(build, build.kochiku_yml) } before do allow(GitRepo).to receive(:load_kochiku_yml).and_return(kochiku_yml) allow(GitRepo).to receive(:inside_copy).and_yield allow(Dir).to receive(:[]) do |*globs| matched_files = [] matched_files << 'source_glob/1/foo.rb' if globs.include?('source_glob/1/**') matched_files << 'test_glob/1/bar_spec.rb' if globs.include?('test_glob/1/**') matched_files << 'source_glob/2_part_1/foo.rb' if globs.include?('source_glob/2_part_1/**') matched_files << 'source_glob/2_part_2/foo.rb' if globs.include?('source_glob/2_part_2/**') matched_files << 'test_glob/2_part_1/bar_spec.rb' if globs.include?('test_glob/2_part_1/**') matched_files << 'test_glob/2_part_2/bar_spec.rb' if globs.include?('test_glob/2_part_2/**') matched_files << 'test_glob/default/foo/bar/baz_spec.rb' if globs.include?('test_glob/default/**') matched_files << 'glob/foo/bar.rb' if globs.include?('glob/**') matched_files end allow(GitBlame).to receive(:net_files_changed_in_branch) do changed_files.map { |file| {:file => file, :emails => []} } end end let(:changed_files) { [] } let(:kochiku_yml) do { 'dependency_map_options' => { 'run_all_tests_for_branches' => [ 'master' ] }, 'targets' => [target] } end 
# NOTE(review): extraction artifact — collapsed lines preserved verbatim;
# only comments added. This span is the body of
# spec/lib/partitioner/dependency_map_spec.rb's #partitions examples, plus the
# head of spec/lib/partitioner/go_spec.rb.
# Line 1: the dependency_map fixture (source_glob -> test_glob mappings) and
# the run-all-tests-on-master vs isolated-branch contexts.
let(:dependency_map) do [ { 'source_glob' => 'source_glob/1/**', 'test_glob' => 'test_glob/1/**' }, { 'source_glob' => %w( source_glob/2_part_1/** source_glob/2_part_2/** ), 'test_glob' => %w( test_glob/2_part_1/** test_glob/2_part_2/** ) } ] end subject(:partitions) { partitioner.partitions } context 'when dependency_map is defined' do let(:target) do { 'type' => 'karma_chrome', 'dependency_map' => dependency_map, 'default_test_glob' => 'test_glob/default/**' } end context 'on a branch where all tests should be run' do let(:build) { FactoryBot.create(:build, :branch_record => FactoryBot.create(:master_branch)) } context 'when one of the source_globs matches changed files' do let(:changed_files) { %w(source_glob/1/foo.rb) } it 'should add all test globs to partition' do expect(partitions.first['files']).to( eq(%w(test_glob/1/bar_spec.rb test_glob/2_part_1/bar_spec.rb test_glob/2_part_2/bar_spec.rb test_glob/default/foo/bar/baz_spec.rb)) ) end end context 'when none of the source_globs match the changed files' do let(:changed_files) { %w(does_not_match_any_source) } it 'should add all test globs to partition' do expect(partitions.first['files']).to( eq(%w(test_glob/1/bar_spec.rb test_glob/2_part_1/bar_spec.rb test_glob/2_part_2/bar_spec.rb test_glob/default/foo/bar/baz_spec.rb)) ) end end end context 'on a branch where tests should be isolated' do context 'when one of the source_globs matches changed files' do let(:changed_files) { %w(source_glob/1/foo.rb) } it 'adds only the tests for that glob to the partition' do expect(partitions.first['files']).to eq(%w(test_glob/1/bar_spec.rb)) end context 'but no test_glob is provided' do let(:dependency_map) do [ { 'source_glob' => 'source_glob/1/**' } ] end it 'does not add any partitions' do expect(partitions).to eq([]) end end end context 'when multiple source_globs match changed files' do let(:changed_files) { %w(source_glob/1/foo.rb source_glob/2_part_1/foo.rb) } it 'adds the tests for both globs to the partition' do
# Line 2: default_test_glob fallback behavior and per-mapping `workers`
# counts (partition counts capped by the target-level `workers` maximum).
expect(partitions.first['files']).to( eq(%w(test_glob/1/bar_spec.rb test_glob/2_part_1/bar_spec.rb test_glob/2_part_2/bar_spec.rb)) ) end end context 'when none of the source_globs match the changed files' do let(:changed_files) { %w(does_not_match_any_source) } context 'when default_test_glob is defined' do it 'adds only default_test_glob tests to the partition' do expect(partitions.first['files']).to eq(%w(test_glob/default/foo/bar/baz_spec.rb)) end end context 'when default_test_glob is undefined' do let(:target) do { 'type' => 'karma_chrome', 'dependency_map' => dependency_map } end it 'adds no tests to the partition' do expect(partitions).to eq([]) end end end context 'when workers are defined for the maps' do let(:target) do { 'type' => 'karma_chrome', 'workers' => 5, 'dependency_map' => dependency_map } end let(:dependency_map) do [ { 'source_glob' => 'source_glob/1/**', 'test_glob' => 'test_glob/1/**', 'workers' => 1 }, { 'source_glob' => %w( source_glob/2_part_1/** source_glob/2_part_2/** ), 'test_glob' => %w( test_glob/2_part_1/** test_glob/2_part_2/** ), 'workers' => 1 } ] end context 'when one of the source_globs matches changed files' do let(:changed_files) { %w(source_glob/1/foo.rb) } it 'creates partitions for the number of workers specified by that mapping' do expect(partitions.size).to eq(1) end end context 'when multiple source_globs match changed files' do let(:changed_files) { %w(source_glob/1/foo.rb source_glob/2_part_1/foo.rb) } it 'creates partitions for the sum of the workers specified by those mappings' do expect(partitions.size).to eq(2) end context 'and the max workers for the target is less than the sum of those workers' do let(:target) do { 'type' => 'karma_chrome', 'workers' => 1, 'dependency_map' => dependency_map } end it 'creates partitions for the max workers specified by the target' do expect(partitions.size).to eq(1) end end end end end end context 'when dependency_map is undefined' do context 'when glob is defined' do context
# Line 3: the no-dependency_map fallbacks (glob / default_test_glob), end of
# dependency_map_spec, then the head of spec/lib/partitioner/go_spec.rb.
# The `<<~OUTPUT` heredoc holding `go list -json`-style output was collapsed
# by the dump — its original line breaks are unrecoverable, so it must stay
# verbatim; it continues onto the next line.
'when default_test_glob is undefined' do let(:target) do { 'type' => 'karma_chrome', 'glob' => 'glob/**' } end it 'adds tests from glob to partition' do expect(partitions.first['files']).to eq(%w(glob/foo/bar.rb)) end end context 'when default_test_glob is defined' do let(:target) do { 'type' => 'karma_chrome', 'glob' => 'glob/**', 'default_test_glob' => 'test_glob/default/**' } end it 'adds tests from default_test_glob to partition' do expect(partitions.first['files']).to eq(%w(test_glob/default/foo/bar/baz_spec.rb)) end end end context 'when glob is undefined' do context 'when default_test_glob is undefined' do let(:target) do { 'type' => 'karma_chrome' } end it 'does not make any partitions' do expect(partitions).to eq([]) end end context 'when default_test_glob is defined' do let(:target) do { 'type' => 'karma_chrome', 'default_test_glob' => 'test_glob/default/**' } end it 'adds tests from default_test_glob to partition' do expect(partitions.first['files']).to eq(%w(test_glob/default/foo/bar/baz_spec.rb)) end end end end end end ================================================ FILE: spec/lib/partitioner/go_spec.rb ================================================ require 'spec_helper' require 'partitioner/go' describe Partitioner::Go do let(:repository) { FactoryBot.create(:repository) } let(:branch) { FactoryBot.create(:master_branch, repository: repository, name: "master") } let!(:build) { FactoryBot.create(:build, branch_record: branch) } let(:kochiku_yml) { nil } subject { Partitioner::Go.new(build, kochiku_yml) } before do allow(GitRepo).to receive(:inside_copy).and_yield 'some_dir' end context "with actual files" do let(:go_list_output) { <<~OUTPUT { "ImportPath": "/vendor/test", "Deps": [ "runtime", "runtime/internal/atomic", "runtime/internal/sys", "unsafe" ] } { "ImportPath": "liba", "Deps": [ "runtime", "runtime/internal/atomic", "runtime/internal/sys", "unsafe" ] } { "ImportPath": "libb", "Deps": [ "runtime", "runtime/internal/atomic",
"runtime/internal/sys", "unsafe" ] } { "ImportPath": "libc/test", "Deps": [ "runtime", "runtime/internal/atomic", "runtime/internal/sys", "unsafe" ] } { "ImportPath": "libc", "Imports": [ "liba" ], "Deps": [ "liba", "runtime", "runtime/internal/atomic", "runtime/internal/sys", "unsafe" ], "TestImports": [ "libb", "testing" ] } { "ImportPath": "libd", "Imports": [ "libc" ], "Deps": [ "liba", "libc", "runtime", "runtime/internal/atomic", "runtime/internal/sys", "unsafe" ], "TestImports": [ "libc", "testing" ] } { "ImportPath": "libe", "Imports": [ "libb" ], "Deps": [ "libb", "runtime", "runtime/internal/atomic", "runtime/internal/sys", "unsafe" ], "XTestImports": [ "liba", "testing" ] } OUTPUT } before do go_list_double = double('go list') allow(go_list_double).to receive(:run).and_return(go_list_output) allow(Cocaine::CommandLine).to receive(:new).and_return(go_list_double) end describe "#package_info_map" do it "it should get the package info map" do pinfo_map = subject.package_info_map expect(pinfo_map["liba"]["ImportPath"]).to eq("liba") expect(pinfo_map["liba"]["Deps"]).to eq(["runtime", "runtime/internal/atomic", "runtime/internal/sys", "unsafe"]) expect(pinfo_map["libb"]["ImportPath"]).to eq("libb") expect(pinfo_map["libc"]["Imports"]).to eq(["liba"]) end end describe "#module_dependency_map" do it "it should get the dependencies" do dep_map = subject.package_dependency_map expect(dep_map["liba"]).to eq(%w[liba libc libe_test].to_set) expect(dep_map["libb"]).to eq(%w[libb libc libe].to_set) expect(dep_map["libc"]).to eq(%w[libc libd].to_set) expect(dep_map["libd"]).to eq(%w[libd].to_set) expect(dep_map["libe"]).to eq(%w[libe].to_set) end end describe "#depends_on_map" do it "it should get the dependencies" do dep_map = subject.depends_on_map expect(dep_map["liba"]).to eq(%w[liba libc libd libe_test].to_set) expect(dep_map["libb"]).to eq(%w[libb libc libe].to_set) expect(dep_map["libc"]).to eq(%w[libc libd].to_set) expect(dep_map["libd"]).to eq(%w[libd].to_set) 
expect(dep_map["libe"]).to eq(%w[libe].to_set) end end describe "#all_packages" do it 'should filter /vendor' do expect(subject.all_packages.include?("/vendor/test")).to eq(false) expect(subject.all_packages.include?("liba")).to eq(true) end end describe "#add_partitions" do it 'should create partitions for all target_types' do partitions = subject.add_partitions(subject.all_packages) expect(partitions.size).to eq(subject.all_packages_target_types.size + subject.top_level_packages_target_types.size) end end describe "#package_folders_map" do it 'should return the packages as folders' do folder_map = subject.package_folders_map(subject.all_packages) expect(folder_map["liba"]).to eq(%w[./liba/]) expect(folder_map["libc"]).to eq(%w[./libc/test/ ./libc/]) end end describe "#failed_convergence_tests" do it 'should return an empty array if there is no previous build' do expect(subject.failed_convergence_tests).to eq(%w[]) end it 'should return the failed paths on a previous build' do failed_build = FactoryBot.create(:completed_build, branch_record: branch, num_build_parts: 1, state: 'failed') allow_any_instance_of(Build).to receive(:previous_build).and_return(failed_build) expect(subject.failed_convergence_tests).to eq(%w[/foo/1.test foo/baz/a.test foo/baz/b.test]) end end describe "#file_to_packages" do it 'should return paths based on a files package dependencies for a .go file' do expect(subject.file_to_packages("libb/test.go")).to eq(%w[libb libc libe]) end it 'should return the path of the toplevel package for a non .go file' do expect(subject.file_to_packages("libb/readme.md")).to eq(%w[libb]) end end describe "#add_with_split" do it 'should handle an empty package_list' do expect(subject.add_with_split([], "test", 2)).to be_nil end end end end ================================================ FILE: spec/lib/partitioner/maven_spec.rb ================================================ require 'spec_helper' require 'partitioner/maven' describe Partitioner::Maven do 
# NOTE(review): extraction artifact — collapsed lines preserved verbatim;
# only comments added. This span is the setup and #group_modules /
# #partitions examples of spec/lib/partitioner/maven_spec.rb.
# Line 1: fixtures, and #group_modules grouping by top-level directory with
# the maven_settings `expand_directories` override.
let(:repository) { FactoryBot.create(:repository) } let(:branch) { FactoryBot.create(:master_branch, repository: repository, name: "master") } let(:build) { FactoryBot.create(:build, branch_record: branch) } let(:kochiku_yml) { nil } subject { Partitioner::Maven.new(build, kochiku_yml) } before do allow(GitRepo).to receive(:inside_copy).and_yield end describe "#group_modules" do it "should group modules based on the top level directory" do modules = ["a", "b", "b/1", "b/2", "b/1/2", "c/1"] partitions = subject.group_modules(modules) expect(partitions.size).to eq(3) expect(partitions).to include(a_hash_including({ 'files' => ['a'] })) expect(partitions).to include(a_hash_including({ 'files' => ['b', 'b/1', 'b/1/2', 'b/2'] })) expect(partitions).to include(a_hash_including({ 'files' => ['c/1'] })) end context "with expand_directories" do let(:kochiku_yml) { { 'maven_settings' => { 'expand_directories' => ['b'], } } } it "should break down modules when included in expand_directories" do modules = ["a", "b", "b/elephant", "b/elephant/elephant-protos", "b/mint",] partitions = subject.group_modules(modules) expect(partitions.size).to eq(4) expect(partitions).to include(a_hash_including({ 'files' => ['a'] })) expect(partitions).to include(a_hash_including({ 'files' => ['b'] })) expect(partitions).to include(a_hash_including({ 'files' => ['b/elephant', 'b/elephant/elephant-protos'] })) expect(partitions).to include(a_hash_including({ 'files' => ['b/mint'] })) end end end describe "#partitions" do context "on a convergence branch" do before do expect(build.branch_record).to be_convergence allow(subject).to receive(:sort_modules) { |mvn_modules| mvn_modules } end context "for a given set of file changes" do before do allow(GitBlame).to receive(:files_changed_since_last_build).with(build, sync: anything) .and_return([{:file => "module-one/src/main/java/com/lobsters/foo.java", :emails => []}, {:file => "module-two/src/main/java/com/lobsters/bar.java", :emails => []}])
# Line 2: File.exist?/depends_on_map stubs and the basic module-selection
# assertions, plus the `multiple_workers` chunking context.
allow(File).to receive(:exist?).and_return(false) allow(File).to receive(:exist?).with("module-one/pom.xml").and_return(true) allow(File).to receive(:exist?).with("module-two/pom.xml").and_return(true) allow(subject).to receive(:maven_modules).and_return(["module-one", "module-two", "module-two/integration", "module-three", "module-four"]) allow(subject).to receive(:depends_on_map).and_return( { "module-one" => ["module-one", "module-three", "module-four"].to_set, "module-two" => ["module-two", "module-two/integration", "module-three"].to_set, } ) expect(subject).to_not receive(:all_partitions) end it "should return the set of modules to build" do partitions = subject.partitions expect(partitions.first['type']).to eq('maven') # This should be true for all partitioner actions expect(partitions.first['options']).to_not include('log_file_globs') # Unless log_file_globs is set expect(partitions.size).to eq(4) expect(partitions).to include(a_hash_including({ 'files' => ['module-one'] })) expect(partitions).to include(a_hash_including({ 'files' => ['module-two', 'module-two/integration'] })) expect(partitions).to include(a_hash_including({ 'files' => ['module-three'] })) expect(partitions).to include(a_hash_including({ 'files' => ['module-four'] })) end context "multiple workers are specified for a module" do let(:kochiku_yml) { { 'maven_settings' => { 'multiple_workers' => {'module-one' => 3} } } } it "should return set of modules to build, with separate entries for each test chunk" do partitions = subject.partitions expect(partitions.size).to eq(6) expect(partitions).to include(a_hash_including({ 'files' => ['module-one'], 'options' => {'total_workers' => 3, 'worker_chunk' => 1}})) expect(partitions).to include(a_hash_including({ 'files' => ['module-one'], 'options' => {'total_workers' => 3, 'worker_chunk' => 2}})) expect(partitions).to include(a_hash_including({ 'files' => ['module-one'], 'options' => {'total_workers' => 3, 'worker_chunk' => 3}}))
# Line 3: always_build, log_file_globs (string vs array), and ignore_paths
# contexts, then the previous-build-on-same-branch context (non-successful
# parts get re-queued).
expect(partitions).to include(a_hash_including({ 'files' => ['module-three'], 'options' => {}})) end end context "with always_build set" do let(:kochiku_yml) { { 'maven_settings' => { 'always_build' => ['module-b'], } } } it "should always include the always_build in the partitions" do partitions = subject.partitions expect(partitions.size).to eq(5) expect(partitions).to include(a_hash_including({ 'files' => ['module-b'] })) end end context 'with log_file_globs' do let(:kochiku_yml) { { 'log_file_globs' => log_files } } context 'that uses a single string' do let(:log_files) { 'mylog.log' } it 'puts an array into the options' do partitions = subject.partitions expect(partitions.first['options']['log_file_globs']).to eq(['mylog.log']) end end context 'that uses an array' do let(:log_files) { ['mylog.log', 'another.log'] } it 'puts the array into the options' do partitions = subject.partitions expect(partitions.first['options']['log_file_globs']).to eq(['mylog.log', 'another.log']) end end end context "with ignore_paths set" do let(:kochiku_yml) { { 'maven_settings' => { 'ignore_paths' => ['module-two'], } } } it "should not return partitions for an ignored directory" do partitions = subject.partitions expect(partitions.size).to eq(3) expect(partitions).to include(a_hash_including({ 'files' => ['module-one'] })) expect(partitions).to include(a_hash_including({ 'files' => ['module-three'] })) expect(partitions).to include(a_hash_including({ 'files' => ['module-four'] })) end end end context "with a previous build on the same branch" do let(:build2) { FactoryBot.create(:build, branch_record: branch) } subject { Partitioner::Maven.new(build2, kochiku_yml) } it "should add all the non-successful parts from the previous build" do build_part = FactoryBot.create(:build_part, :build_instance => build, :paths => ["module-one"]) expect(build.build_parts.first).to be_unsuccessful partitions = subject.partitions expect(partitions.size).to eq(1) expect(partitions).to include(
# Line 4: successful previous parts are skipped; build_everything forces a
# full build; unmapped top-level files force a full build.
a_hash_including("files" => build_part.paths, "queue" => build_part.queue.to_s) ) end it "should not add all successful parts from the previous build" do build_part = FactoryBot.create(:build_part, :build_instance => build, :paths => ["module-one"]) FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed') expect(build.build_parts.first).to be_successful partitions = subject.partitions expect(partitions.size).to eq(0) end end context "with build_everything set" do let(:kochiku_yml) { { 'maven_settings' => { 'build_everything' => ['build-all'], } } } it "should build everything if one of the changed file starts with a path in build_everything" do allow(GitBlame).to receive(:files_changed_since_last_build).with(build, sync: anything) .and_return([{:file => "build-all/src/main/java/com/lobsters/foo.java", :emails => []}]) allow(File).to receive(:exist?).and_return(false) allow(subject).to receive(:pom_for).and_return "" allow(subject).to receive(:maven_modules).and_return(["module-one", "build-all"]) allow(subject).to receive(:depends_on_map).and_return( { "module-one" => ["module-one", "build-all"].to_set, "build-all" => ["build-all"].to_set, } ) expect(subject).to receive(:all_partitions).and_return([{"type" => "maven", "files" => "ALL"}]) partitions = subject.partitions expect(partitions.size).to eq(1) expect(partitions.first).to match('type' => 'maven', 'files' => 'ALL', 'options' => {}) end end it "should build everything if one of the files does not map to a module" do allow(GitBlame).to receive(:files_changed_since_last_build).with(build, sync: anything) .and_return([{:file => "toplevel/foo.xml", :emails => []}]) allow(subject).to receive(:depends_on_map).and_return( { "module-one" => ["module-one", "module-three", "module-four"].to_set, "module-two" => ["module-two", "module-three"].to_set } ) expect(subject).to receive(:all_partitions).and_return([{"type" => "maven", "files" => "ALL"}]) partitions = subject.partitions
# Line 5: new-module-not-in-top-level-pom handling, options on partial
# builds, and the start of the non-convergence branch context. The final
# example's description string is split mid-literal by the dump and
# continues on the next line — preserved verbatim.
expect(partitions.size).to eq(1) expect(partitions.first).to match('type' => 'maven', 'files' => 'ALL', 'options' => {}) end it "should not fail if a file is referenced in a top level module that is not in the top level pom" do allow(GitBlame).to receive(:files_changed_since_last_build).with(build, sync: anything) .and_return([{:file => "new-module/src/main/java/com/lobsters/foo.java", :emails => []}]) allow(File).to receive(:exist?).and_return(false) allow(File).to receive(:exist?).with("new-module/pom.xml").and_return(true) allow(subject).to receive(:maven_modules).and_return(["module-one", "module-two"]) allow(subject).to receive(:depends_on_map).and_return( { "module-one" => ["module-one", "module-three", "module-four"].to_set, "module-two" => ["module-two", "module-three"].to_set } ) expect(subject).to_not receive(:all_partitions) partitions = subject.partitions expect(partitions.size).to eq(0) end context "with options" do let(:kochiku_yml) { { 'log_file_globs' => 'mylog.log', 'retry_count' => 5 } } it "should include options in the event of a partial build" do allow(GitBlame).to receive(:files_changed_since_last_build).with(build, sync: anything) .and_return([{:file => "toplevel/foo.xml", :emails => []}]) expect(subject).to receive(:all_partitions).and_return([{"type" => "maven", "files" => "ALL"}]) partitions = subject.partitions expect(partitions.size).to be > 0 expect(partitions.first).to match a_hash_including('options' => {'log_file_globs' => ['mylog.log'], 'retry_count' => 5}) end end end context "on a non-convergence branch" do let(:branch) { FactoryBot.create(:branch, convergence: false) } # let(:build) { FactoryBot.create(:build, :branch => "branch-of-master") } before do expect(build.branch_record).to_not be_convergence end context "with a previous build" do let(:build2) { FactoryBot.create(:build, branch_record: FactoryBot.create(:master_branch)) } subject { Partitioner::Maven.new(build2, kochiku_yml) } it "should NOT add all the non-successful
# NOTE(review): extraction artifact — collapsed lines preserved verbatim;
# only comments added. This span finishes maven_spec's #partitions examples
# and covers #emails_for_commits_causing_failures.
# Line 1 starts mid-string-literal (the description split on the previous
# line); then: no re-queue on non-convergence branches, the empty-failures
# base case, and the failing-module email attribution example.
parts from the previous build" do FactoryBot.create(:build_part, :build_instance => build, :paths => ["module-one"]) expect(build.build_parts.first).to be_unsuccessful allow(subject).to receive(:sort_modules) { |mvn_modules| mvn_modules } partitions = subject.partitions expect(partitions.size).to eq(0) end end end end describe "#emails_for_commits_causing_failures" do it "should return nothing if there are no failed parts" do expect(build.build_parts.failed_or_errored).to be_empty emails = subject.emails_for_commits_causing_failures expect(emails).to be_empty end context "with a module that failed to build" do before do build_part = FactoryBot.create(:build_part, :paths => ["failed-module"], :build_instance => build) FactoryBot.create(:build_attempt, :state => 'failed', :build_part => build_part) expect(build.build_parts.failed_or_errored).to eq([build_part]) allow(GitRepo).to receive(:inside_copy).and_yield expect(subject).to_not receive(:all_partitions) end it "should return the emails for the modules that are failing" do allow(GitBlame).to receive(:files_changed_since_last_green).with(build, fetch_emails: true) .and_return([{:file => "module-one/src/main/java/com/lobsters/Foo.java", :emails => ["userone@example.com"]}, {:file => "module-two/src/main/java/com/lobsters/Bar.java", :emails => ["usertwo@example.com"]}, {:file => "failed-module/src/main/java/com/lobsters/Baz.java", :emails => ["userfour@example.com"]}, {:file => "failed-module/src/main/java/com/lobsters/Bing.java", :emails => ["userfour@example.com"]}]) allow(File).to receive(:exist?).and_return(false) allow(File).to receive(:exist?).with("module-one/pom.xml").and_return(true) allow(File).to receive(:exist?).with("module-two/pom.xml").and_return(true) allow(File).to receive(:exist?).with("failed-module/pom.xml").and_return(true) allow(subject).to receive(:depends_on_map).and_return( { "module-one" => ["module-one", "module-three", "failed-module"].to_set, "module-two" => ["module-two",
# Line 2: attribution assertions (users whose files feed the failed module
# are blamed), and the ignore_paths interaction — ignored paths only
# suppress emails when they are also absent from the dependency map.
"module-three"].to_set, "failed-module" => ["failed-module"].to_set } ) email_and_files = subject.emails_for_commits_causing_failures expect(email_and_files.size).to eq(2) expect(email_and_files["userone@example.com"]).to eq(["module-one/src/main/java/com/lobsters/Foo.java"]) expect(email_and_files["userfour@example.com"].size).to eq(2) expect(email_and_files["userfour@example.com"]).to include("failed-module/src/main/java/com/lobsters/Baz.java") expect(email_and_files["userfour@example.com"]).to include("failed-module/src/main/java/com/lobsters/Bing.java") end context "with ignore_paths set" do let(:kochiku_yml) { { 'maven_settings' => { 'ignore_paths' => ['ignored-module'], } } } it "should not return emails if changes are on an ignored path and not in the dependency map" do allow(GitBlame).to receive(:files_changed_since_last_green).with(build, fetch_emails: true) .and_return([{:file => "ignored-module/src/main/java/com/lobsters/Foo.java", :emails => ["userone@example.com"]}, {:file => "failed-module/src/main/java/com/lobsters/Bing.java", :emails => ["userfour@example.com"]}]) allow(File).to receive(:exist?).and_return(false) allow(File).to receive(:exist?).with("ignored-module/pom.xml").and_return(true) allow(File).to receive(:exist?).with("failed-module/pom.xml").and_return(true) allow(subject).to receive(:depends_on_map).and_return({ "ignored-module" => ["ignored-module"].to_set, "failed-module" => ["failed-module"].to_set }) email_and_files = subject.emails_for_commits_causing_failures expect(email_and_files.size).to eq(1) expect(email_and_files["userfour@example.com"]).to eq(["failed-module/src/main/java/com/lobsters/Bing.java"]) end it "should return emails if changes are on an ignored path but are in the dependency map" do allow(GitBlame).to receive(:files_changed_since_last_green).with(build, fetch_emails: true) .and_return([{:file => "ignored-module/src/main/java/com/lobsters/Foo.java", :emails => ["userone@example.com"]}, {:file =>
# Line 3: the ignored-path-in-dependency-map assertions and the
# build_everything email context (changes under build_everything paths are
# always attributed).
"failed-module/src/main/java/com/lobsters/Bing.java", :emails => ["userfour@example.com"]}]) allow(File).to receive(:exist?).and_return(false) allow(File).to receive(:exist?).with("ignored-module/pom.xml").and_return(true) allow(File).to receive(:exist?).with("failed-module/pom.xml").and_return(true) allow(subject).to receive(:depends_on_map).and_return({ "ignored-module" => ["ignored-module", "failed-module"].to_set, "failed-module" => ["failed-module"].to_set }) email_and_files = subject.emails_for_commits_causing_failures expect(email_and_files.size).to eq(2) expect(email_and_files["userone@example.com"]).to eq(["ignored-module/src/main/java/com/lobsters/Foo.java"]) expect(email_and_files["userfour@example.com"]).to eq(["failed-module/src/main/java/com/lobsters/Bing.java"]) end end context "with build_everything set" do let(:kochiku_yml) { { 'maven_settings' => { 'build_everything' => ['build-all'], } } } it "should return email for change to build_everything even if build_everything module does not depend on changed file" do allow(GitBlame).to receive(:files_changed_since_last_green).with(build, fetch_emails: true) .and_return([{:file => "build-all/src/main/java/com/lobsters/Foo.java", :emails => ["userone@example.com"]}, {:file => "module-four/src/main/java/com/lobsters/Bar.java", :emails => ["userfour@example.com"]}]) allow(File).to receive(:exist?).and_return(false) allow(File).to receive(:exist?).with("build-all/pom.xml").and_return(true) allow(File).to receive(:exist?).with("failed-module/pom.xml").and_return(true) allow(subject).to receive(:depends_on_map).and_return({ "build-all" => ["build-all"].to_set, "module-four" => ["module-four"].to_set }) email_and_files = subject.emails_for_commits_causing_failures expect(email_and_files.size).to eq(2) expect(email_and_files["userone@example.com"]).to eq(["build-all/src/main/java/com/lobsters/Foo.java"]) expect(email_and_files["userfour@example.com"]).to eq(["module-four/src/main/java/com/lobsters/Bar.java"])
end end end end describe "#sort_modules" do before do allow(subject).to receive(:module_dependency_map).and_return({ "module-one" => ["module-two"].to_set, "module-two" => ["module-three"].to_set, "module-three" => ["module-four"].to_set, "module-four" => Set.new, "module-five" => Set.new, }) end it "should sort the modules based on a topological sort of the dependency map" do sorted_modules = subject.sort_modules(["module-one", "module-three", "module-four", "module-two"]) expect(sorted_modules).to eq(["module-four", "module-three", "module-two", "module-one"]) end it "should sort partial module lists that depend on each other" do sorted_modules = subject.sort_modules(["module-two", "module-three"]) expect(sorted_modules).to eq(["module-three", "module-two"]) end it "should sort partial module lists of one" do expect(subject.sort_modules(["module-two"])).to eq(["module-two"]) end it "should sort empty module lists" do expect(subject.sort_modules([])).to eq([]) end end describe "#depends_on_map" do it "should convert a dependency map to a depends on map" do allow(subject).to receive(:transitive_dependency_map).and_return({ "module-one" => ["a", "b"].to_set, "module-two" => ["b", "c", "module-one"].to_set, "module-three" => Set.new }) depends_on_map = subject.depends_on_map expect(depends_on_map["module-one"]).to eq(["module-one", "module-two"].to_set) expect(depends_on_map["module-two"]).to eq(["module-two"].to_set) expect(depends_on_map["module-three"]).to eq(["module-three"].to_set) expect(depends_on_map["a"]).to eq(["a", "module-one"].to_set) expect(depends_on_map["b"]).to eq(["b", "module-one", "module-two"].to_set) expect(depends_on_map["c"]).to eq(["c", "module-two"].to_set) end end context "with actual files" do let(:top_level_pom) { <<-POM module-one module-two module-three POM } let(:module_one_pom) { <<-POM one-branch com.lobsters module-core com.lobsters module-extras junit junit POM } let(:module_two_pom) { <<-POM two-branch com.lobsters module-extras 
com.lobsters module-three POM } let(:module_three_pom) { <<-POM com.lobsters module-three junit junit POM } let(:module_four_pom) { <<-POM super-branch com.lobsters module-four com.lobsters super-module POM } before do allow(File).to receive(:read).with(Partitioner::Maven::POM_XML).and_return(top_level_pom) allow(File).to receive(:read).with("module-one/pom.xml").and_return(module_one_pom) allow(File).to receive(:read).with("module-two/pom.xml").and_return(module_two_pom) allow(File).to receive(:read).with("module-three/pom.xml").and_return(module_three_pom) end describe "#module_dependency_map" do it "it should get the dependencies from a pom" do dependency_map = subject.module_dependency_map expect(dependency_map["module-one"]).to eq(["module-two"].to_set) expect(dependency_map["module-two"]).to eq(["module-three"].to_set) expect(dependency_map["module-three"]).to eq(Set.new) end context "with a dependency missing a groupId" do let(:module_three_pom) { <<-POM com.lobsters module-three junit POM } it "it should return a useful error message" do expect { subject.module_dependency_map }.to raise_error("dependency in module-three/pom.xml is missing an artifactId or groupId") end end end describe "#transitive_dependency_map" do it "it should get the transitive dependencies from a pom" do dependency_map = subject.transitive_dependency_map expect(dependency_map["module-one"]).to eq(["module-one", "module-two", "module-three"].to_set) expect(dependency_map["module-two"]).to eq(["module-two", "module-three"].to_set) expect(dependency_map["module-three"]).to eq(["module-three"].to_set) end end end describe "#transitive_dependencies" do it "should return the module in a set as a base case" do expect(subject.transitive_dependencies("module-one", {"module-one" => Set.new})).to eq(["module-one"].to_set) end it "should work for the recursive case" do dependency_map = { "module-one" => %w(a b c).to_set, "a" => ["d"].to_set, "b" => ["d", "e"].to_set, "c" => Set.new, "d" => 
["e"].to_set, "e" => Set.new, "f" => Set.new } transitive_map = subject.transitive_dependencies("module-one", dependency_map) expect(transitive_map).to eq(["module-one", "a", "b", "c", "d", "e"].to_set) end end describe "#file_to_module" do before do allow(File).to receive(:exist?).and_return(false) end it "should return the module for a src main path" do allow(File).to receive(:exist?).with("oyster/pom.xml").and_return(true) expect(subject.file_to_module("oyster/src/main/java/com/lobsters/oyster/OysterApp.java")).to eq("oyster") end it "should return the module in a subdirectory" do allow(File).to receive(:exist?).with("gateways/cafis/pom.xml").and_return(true) expect(subject.file_to_module("gateways/cafis/src/main/java/com/lobsters/gateways/cafis/data/DataField_9_6_1.java")) .to eq("gateways/cafis") end it "should return the module for a src test path even if there is pom in the parent directory" do allow(File).to receive(:exist?).with("integration/hibernate/pom.xml").and_return(true) allow(File).to receive(:exist?).with("integration/hibernate/tests/pom.xml").and_return(true) expect(subject.file_to_module("integration/hibernate/tests/src/test/java/com/lobsters/integration/hibernate/ConfigurationExtTest.java")) .to eq("integration/hibernate/tests") end it "should return a module for a pom change" do allow(File).to receive(:exist?).with("common/pom.xml").and_return(true) expect(subject.file_to_module("common/pom.xml")).to eq("common") end it "should return nil for a toplevel change" do expect(subject.file_to_module("pom.xml")).to be_nil expect(subject.file_to_module("Gemfile.lock")).to be_nil expect(subject.file_to_module("non_maven_dependencies/README")).to be_nil end end end ================================================ FILE: spec/lib/partitioner/shared_default_behavior.rb ================================================ require 'spec_helper' require 'partitioner' RSpec.shared_examples "Partitioner::Default behavior" do |partitioner_class| let(:build) { 
# NOTE(review): extraction artifact — collapsed lines preserved verbatim;
# only comments added. This span is the body of the shared
# "Partitioner::Default behavior" examples (used by default_spec and
# dependency_map_spec); it is TRUNCATED at the end of this chunk, mid-hash.
# Line 1: kochiku_yml fixture with rspec/cuke targets and balance/manifest
# knobs, #emails_for_commits_causing_failures returns a Hash, and the
# non-Ruby target omits the "ruby" option.
FactoryBot.create(:build) } let(:partitioner) { partitioner_class.new(build, build.kochiku_yml) } before do allow(GitRepo).to receive(:load_kochiku_yml).and_return(kochiku_yml) allow(GitRepo).to receive(:inside_copy).and_yield end let(:kochiku_yml) do { "targets" => [ { 'type' => 'rspec', 'glob' => 'spec/**/*_spec.rb', 'workers' => 3, 'balance' => rspec_balance, 'manifest' => rspec_manifest, 'time_manifest' => rspec_time_manifest, }, { 'type' => 'cuke', 'glob' => 'features/**/*.feature', 'workers' => 3, 'balance' => cuke_balance, 'manifest' => cuke_manifest, 'time_manifest' => cuke_time_manifest, } ] } end let(:rspec_balance) { 'alphabetically' } let(:rspec_manifest) { nil } let(:cuke_balance) { 'alphabetically' } let(:cuke_manifest) { nil } let(:rspec_time_manifest) { nil } let(:cuke_time_manifest) { nil } describe '#emails_for_commits_causing_failures' do subject { partitioner.emails_for_commits_causing_failures } it "should return a hash" do expect(subject).to be_a(Hash) end end describe '#partitions' do subject { partitioner.partitions } context "with a kochiku.yml that does not use Ruby" do let(:kochiku_yml) do { "targets" => [ { 'type' => 'other', 'glob' => 'spec/**/*_spec.rb', 'workers' => 1, } ] } end it "should not include a ruby version" do partitions = subject expect(partitions.size).to be(1) expect(partitions.first["type"]).to eq("other") expect(partitions.first["files"]).not_to be_empty expect(partitions.first["options"]).not_to have_key("ruby") end end context "with a ruby-based kochiku.yml" do let(:queue_override) { nil } let(:retry_count) { nil } let(:kochiku_yml) do { "ruby" => ["ree-1.8.7-2011.12"], "targets" => [ { 'type' => 'rspec', 'glob' => 'spec/**/*_spec.rb', 'workers' => 3, 'balance' => rspec_balance, 'manifest' => rspec_manifest, 'queue_override' => queue_override, 'retry_count' => retry_count, } ] } end it "parses options from kochiku yml" do partitions = subject expect(partitions.first["options"]["ruby"]).to eq("ree-1.8.7-2011.12")
# Line 2: queue selection (ci vs developer, with queue_override suffixing),
# retry_count, and log_file_globs normalization (string -> array).
expect(partitions.first["type"]).to eq("rspec") expect(partitions.first["files"]).not_to be_empty expect(partitions.first["queue"]).to eq("developer") expect(partitions.first["retry_count"]).to eq(0) expect(partitions.first['options']).not_to include('log_file_globs') end context "with a master build" do let(:build) { FactoryBot.create(:convergence_branch_build) } it "should use the ci queue" do expect(build.branch_record).to be_convergence expect(subject.first["queue"]).to eq("ci") end context "with queue_override" do let(:queue_override) { "override" } it "should override the queue on the build part" do expect(subject.first["queue"]).to eq("ci-override") end end end context "with a branch build" do it "should use the developer queue" do expect(build.branch_record).to_not be_convergence expect(subject.first["queue"]).to eq("developer") end context "with queue_override" do let(:queue_override) { "override" } it "should override the queue on the build part" do expect(subject.first["queue"]).to eq("developer-override") end end context "with retry_count" do let(:retry_count) { 2 } it "should set the retry count" do expect(subject.first["retry_count"]).to eq(2) end end end end context 'with log_file_globs' do let(:kochiku_yml) do { 'log_file_globs' => log_files, 'targets' => [ { 'type' => 'other', 'glob' => 'spec/**/*_spec.rb', 'workers' => 1, } ] } end context 'that uses a single string' do let(:log_files) { 'mylog.log' } it 'puts an array into the options' do expect(subject.first['options']['log_file_globs']).to eq(['mylog.log']) end end context 'that uses an array' do let(:log_files) { ['mylog.log', 'another.log'] } it 'puts the array into the options' do expect(subject.first['options']['log_file_globs']).to eq(['mylog.log', 'another.log']) end end end context 'with different log_file_globs specified for different targets' do let(:kochiku_yml) do { 'targets' => [ { 'type' => 'unit', 'glob' => 'spec/**/*_spec.rb', 'workers' => 1, 'log_file_globs' => "log1", }, {
# Line 3: per-target log_file_globs, glob-match balancing (alphabetically,
# round_robin, manifest-ordered). Cut off mid-`time_manifest` hash — the
# remainder of this shared-examples file lies beyond this chunk.
'type' => 'other', 'glob' => 'spec/**/*_spec.rb', 'workers' => 1, 'log_file_globs' => "log2" } ] } end it "should parse log_file_globs properly" do expect(subject.first['options']['log_file_globs']).to eq(['log1']) expect(subject.second['options']['log_file_globs']).to eq(['log2']) end end context 'when the glob matches' do before { allow(Dir).to receive(:[]).and_return(matches) } context 'no files' do let(:matches) { [] } it 'does nothing' do expect(subject).to eq([]) end end context 'one file' do let(:matches) { %w(a) } it 'makes one partition' do expect(subject).to include(a_hash_including({ 'files' => %w(a) })) end end context 'multiple files' do let(:matches) { %w(a b c d) } # :rspec_balance set to alphabetically above it 'using alphabetically' do partitions = subject expect(partitions).to include(a_hash_including({ 'files' => %w(a b) })) expect(partitions).to include(a_hash_including({ 'files' => %w(c) })) expect(partitions).to include(a_hash_including({ 'files' => %w(d) })) end context 'and balance is round_robin' do let(:rspec_balance) { 'round_robin' } it 'uses round_robin' do partitions = subject expect(partitions).to include(a_hash_including({ 'files' => %w(a d) })) expect(partitions).to include(a_hash_including({ 'files' => %w(b) })) expect(partitions).to include(a_hash_including({ 'files' => %w(c) })) end context 'and a manifest file is specified' do before { allow(YAML).to receive(:load_file).with(rspec_manifest).and_return(%w(c b a)) } let(:rspec_manifest) { 'manifest.yml' } let(:matches) { %w(a b c d) } it 'uses the manifest' do partitions = subject expect(partitions).to include(a_hash_including({ 'files' => %w(c d) })) expect(partitions).to include(a_hash_including({ 'files' => %w(a) })) expect(partitions).to include(a_hash_including({ 'files' => %w(b) })) end end context 'and time manifest files are specified' do before do allow(YAML).to receive(:load_file).with(rspec_time_manifest).and_return( { 'a.spec' => [2], 'b.spec' => [5, 8], 'c.spec' =>
[9, 6], 'd.spec' => [5, 8], 'deleted.spec' => [10], } ) allow(YAML).to receive(:load_file).with(cuke_time_manifest).and_return( { 'f.feature' => [2], 'g.feature' => [5, 8], 'h.feature' => [6, 9], 'i.feature' => [15, 16], } ) allow(Dir).to receive(:[]).with("spec/**/*_spec.rb").and_return(spec_matches) allow(Dir).to receive(:[]).with("features/**/*.feature").and_return(feature_matches) end let(:rspec_time_manifest) { 'rspec_time_manifest.yml' } let(:cuke_time_manifest) { 'cuke_time_manifest.yml' } let(:spec_matches) { %w(a.spec b.spec c.spec d.spec e.spec) } let(:feature_matches) { %w(f.feature g.feature h.feature i.feature) } it 'should greedily partition files in the time_manifest, and round robin the remaining files' do partitions = subject expect(partitions).to include(a_hash_including({ 'type' => 'rspec', 'files' => ['c.spec', 'e.spec'] })) expect(partitions).to include(a_hash_including({ 'type' => 'rspec', 'files' => ['d.spec', 'a.spec'] })) expect(partitions).to include(a_hash_including({ 'type' => 'rspec', 'files' => ['b.spec'] })) expect(partitions).to include(a_hash_including({ 'type' => 'cuke', 'files' => ['i.feature'] })) expect(partitions).to include(a_hash_including({ 'type' => 'cuke', 'files' => ['h.feature'] })) expect(partitions).to include(a_hash_including({ 'type' => 'cuke', 'files' => ['g.feature', 'f.feature']})) end end end context 'and balance is size' do let(:rspec_balance) { 'size' } before do allow(File).to receive(:size).with('a').and_return(1) allow(File).to receive(:size).with('b').and_return(1000) allow(File).to receive(:size).with('c').and_return(100) allow(File).to receive(:size).with('d').and_return(10) end it 'uses size' do partitions = subject expect(partitions).to include(a_hash_including({ 'files' => %w(b a) })) expect(partitions).to include(a_hash_including({ 'files' => %w(c) })) expect(partitions).to include(a_hash_including({ 'files' => %w(d) })) end end context 'and balance is size_greedy_partitioning' do let(:rspec_balance) 
{ 'size_greedy_partitioning' } before do allow(File).to receive(:size).with('a').and_return(1) allow(File).to receive(:size).with('b').and_return(1000) allow(File).to receive(:size).with('c').and_return(100) allow(File).to receive(:size).with('d').and_return(10) end it 'uses greedy_size' do partitions = subject expect(partitions).to include(a_hash_including({ 'files' => %w(b) })) expect(partitions).to include(a_hash_including({ 'files' => %w(c) })) expect(partitions).to include(a_hash_including({ 'files' => %w(d a) })) end end context 'and balance is size_average_partitioning' do let(:rspec_balance) { 'size_average_partitioning' } before do allow(File).to receive(:size).with('a').and_return(1) allow(File).to receive(:size).with('b').and_return(1000) allow(File).to receive(:size).with('c').and_return(100) allow(File).to receive(:size).with('d').and_return(10) end it 'uses size_average' do partitions = subject expect(partitions).to include(a_hash_including({ 'files' => %w(a b) })) expect(partitions).to include(a_hash_including({ 'files' => %w(c) })) expect(partitions).to include(a_hash_including({ 'files' => %w(d) })) end end context 'and balance is isolated' do let(:rspec_balance) { 'isolated' } it 'isolates files' do partitions = subject expect(partitions).to include(a_hash_including({ 'files' => %w(a) })) expect(partitions).to include(a_hash_including({ 'files' => %w(b) })) expect(partitions).to include(a_hash_including({ 'files' => %w(c) })) expect(partitions).to include(a_hash_including({ 'files' => %w(d) })) end end end end context 'when target is specified' do let(:kochiku_yml) do { 'targets' => [ { 'type' => 'instrumentation', 'target' => 'util:libraryA' }, { 'type' => 'instrumentation', 'target' => ['util:libraryB', 'util:libraryC'] } ] } end it 'accepts both strings and arrays' do expect(subject.size).to eq(2) expect(subject[0]['files']).to eq(['util:libraryA']) expect(subject[1]['files']).to eq(['util:libraryB', 'util:libraryC']) end context "when workers 
and/or glob are also specified" do let(:kochiku_yml) do { 'targets' => [ { 'type' => 'instrumentation', 'target' => 'util:libraryA', 'workers' => 10, 'glob' => 'spec/**/*_spec.rb' } ] } end it 'ignores them' do expect(subject.size).to eq(1) expect(subject[0]['files']).to eq(['util:libraryA']) end end end end end ================================================ FILE: spec/lib/partitioner_spec.rb ================================================ require 'spec_helper' require 'partitioner' describe Partitioner do let(:build) { FactoryBot.create(:build, ref: to_40('1')) } before do allow(GitRepo).to receive(:load_kochiku_yml).and_return(kochiku_yml) allow(GitRepo).to receive(:inside_repo).and_yield allow(GitRepo).to receive(:inside_copy).and_yield end describe "#for_build" do subject { Partitioner.for_build(build) } context "when there is no kochiku.yml" do let(:kochiku_yml) { nil } it "should return a single partiion" do partitions = subject.partitions expect(partitions.size).to eq(1) expect(partitions.first["type"]).to eq("test") expect(partitions.first["files"]).not_to be_empty end end context "when there is a kochiku.yml" do context "when no partitioner is specified" do let(:kochiku_yml) do { "ruby" => ["ree-1.8.7-2011.12"], "targets" => [ { 'type' => 'rspec', 'glob' => 'spec/**/*_spec.rb', 'workers' => 3, } ] } end it "parses options from kochiku yml" do allow(Time).to receive(:now).and_return(Time.new(1977, 3, 10, 5, 30, 0).utc) expect(Rails.logger).to receive(:info).with("Partition finished: [DEFAULT] 0.0 1111111111111111111111111111111111111111") partitions = subject.partitions expect(partitions.first["options"]["ruby"]).to eq("ree-1.8.7-2011.12") expect(partitions.first["type"]).to eq("rspec") expect(partitions.first["files"]).not_to be_empty expect(partitions.first["queue"]).to eq("developer") expect(partitions.first["retry_count"]).to eq(0) end end context "when using the maven partitioner" do let(:kochiku_yml) { {'partitioner' => 'maven'} } it "should call 
the maven partitioner" do allow(Time).to receive(:now).and_return(Time.new(1977, 3, 10, 5, 30, 0).utc) expect(Rails.logger).to receive(:info).with("Partition finished: [maven] 0.0 1111111111111111111111111111111111111111") expect(subject).to be_a(Partitioner::Maven) end end context "when using the dependency_map partitioner" do let(:kochiku_yml) { {'partitioner' => 'dependency_map'} } it "should call the dependency_map partitioner" do allow(Time).to receive(:now).and_return(Time.new(1977, 3, 10, 5, 30, 0).utc) expect(Rails.logger).to receive(:info).with("Partition finished: [dependency_map] 0.0 1111111111111111111111111111111111111111") expect(subject).to be_a(Partitioner::DependencyMap) end end end end end ================================================ FILE: spec/lib/remote_server/github_spec.rb ================================================ require 'spec_helper' require 'remote_server' require 'remote_server/github' describe RemoteServer::Github do def make_server(url) described_class.new(url, Settings.git_server(url)) end describe "base_api_url" do describe "for github.com" do it "should use the api subdomain" do url = "git@github.com:square/kochiku.git" expect(make_server(url).base_api_url).to eq("https://api.github.com/repos/square/kochiku") end end describe "for github enterprise" do it "should use the api path prefix" do url = "git@git.example.com:square/kochiku.git" expect(make_server(url).base_api_url).to eq("https://git.example.com/api/v3/repos/square/kochiku") end end end describe '#attributes' do it 'raises UnknownUrlFormat for invalid urls' do expect { make_server("https://github.com/blah") }.to raise_error(RemoteServer::UnknownUrlFormat) expect { make_server("github.com/asdf") }.to raise_error(RemoteServer::UnknownUrlFormat) end it 'parses ssh URLs' do result = make_server("git@github.com:who/myrepo.git") expect(result.attributes).to eq( host: 'github.com', repository_namespace: 'who', repository_name: 'myrepo', possible_hosts: ['github.com'] ) 
end it 'parses git:// URLs' do result = make_server("git://github.com/who/myrepo.git") expect(result.attributes).to eq( host: 'github.com', repository_namespace: 'who', repository_name: 'myrepo', possible_hosts: ['github.com'] ) end it 'parses HTTPS URLs' do result = make_server("https://git.example.com/who/myrepo.git") expect(result.attributes).to eq( host: 'git.example.com', repository_namespace: 'who', repository_name: 'myrepo', possible_hosts: ['git.example.com'] ) end it 'should allow periods, hyphens, and underscores in repository names' do result = make_server("git@github.com:angular/an-gu_lar.js.git") expect(result.attributes[:repository_name]).to eq('an-gu_lar.js') result = make_server("git://github.com/angular/an-gu_lar.js.git") expect(result.attributes[:repository_name]).to eq('an-gu_lar.js') result = make_server("https://github.com/angular/an-gu_lar.js.git") expect(result.attributes[:repository_name]).to eq('an-gu_lar.js') end it 'should not allow characters disallowed by Github in repository names' do %w(! @ # $ % ^ & * ( ) = + \ | ` ~ [ ] { } : ; ' " ? 
/).each do |symbol| expect { make_server("git@github.com:angular/bad#{symbol}name.git") }.to raise_error(RemoteServer::UnknownUrlFormat) expect { make_server("git://github.com/angular/bad#{symbol}name.git") }.to raise_error(RemoteServer::UnknownUrlFormat) expect { make_server("https://github.com/angular/bad#{symbol}name.git") }.to raise_error(RemoteServer::UnknownUrlFormat) end end end describe '#canonical_repository_url' do it 'should return a ssh url when given a https url' do https_url = "https://github.com/square/test-repo1.git" result = make_server(https_url).canonical_repository_url expect(result).to eq("git@github.com:square/test-repo1.git") end it 'should do nothing when given a ssh url' do ssh_url = "git@github.com:square/test-repo1.git" result = make_server(ssh_url).canonical_repository_url expect(result).to eq(ssh_url) end end describe '#get_branch_url' do it 'should return the expected url' do https_url = "https://github.com/square/test-repo1.git" result = make_server(https_url).get_branch_url('my-new-branch') expect(result).to eq("https://github.com/square/test-repo1/tree/my-new-branch") end end describe '#open_pull_request_url' do it 'should return the expected url' do https_url = "https://github.com/square/test-repo1.git" result = make_server(https_url).open_pull_request_url('my-new-branch') expect(result).to eq("https://github.com/square/test-repo1/pull/new/master...my-new-branch") end end end ================================================ FILE: spec/lib/remote_server/stash_spec.rb ================================================ require 'spec_helper' require 'remote_server' require 'remote_server/stash' describe 'stash integration test' do before do settings = SettingsAccessor.new(<<-YAML) git_servers: stash.example.com: type: stash username: stashuser password_file: /password YAML stub_const "Settings", settings allow(File).to receive(:read).with("/password").and_return("stashpassword") end let(:url) { 'https://stash.example.com/scm/foo/bar.git' 
} let(:stash) { RemoteServer::Stash.new(url, Settings.git_server(url)) } let(:stash_request) { stash.stash_request } describe ".setup_auth!" do it "should send username and password on" do request = double expect(request).to receive(:basic_auth).with("stashuser", "stashpassword") stash_request.setup_auth!(request) end end describe "#update_commit_status!" do let(:build) { double('build', ref: 'abc123', repository: double('repository', to_param: 'my_namespace/my_repo_name'), succeeded?: true, id: 123) } it "should post to stash" do stub_request(:post, "https://@stash.example.com/rest/build-status/1.0/commits/#{build.ref}") .with(basic_auth: ['stashuser', 'stashpassword']) stash.update_commit_status!(build) expect(WebMock).to have_requested(:post, "https://stash.example.com/rest/build-status/1.0/commits/#{build.ref}") end end end describe RemoteServer::Stash do before do settings = SettingsAccessor.new(<<-YAML) git_servers: stash.example.com: type: stash aliases: - git-alias.example.com YAML stub_const "Settings", settings end def make_server(url) described_class.new(url, Settings.git_server(url)) end describe '#attributes' do it 'parses HTTPS url' do result = make_server \ "https://stash.example.com/scm/myproject/myrepo.git" expect(result.attributes).to include( host: 'stash.example.com', repository_namespace: 'myproject', repository_name: 'myrepo' ) end it 'does not support HTTP auth credentials in URL' do # Use a netrc file instead. 
expect { make_server \ "https://don@stash.example.com/scm/myproject/myrepo.git" }.to raise_error(RemoteServer::UnknownUrlFormat) end it 'parses ssh URLs' do result = make_server \ "git@stash.example.com:myproject/myrepo.git" expect(result.attributes).to include( host: 'stash.example.com', repository_namespace: 'myproject', repository_name: 'myrepo' ) end it 'parses ssh URLs prefixed with ssh://' do result = make_server \ "ssh://git@stash.example.com/myproject/myrepo.git" expect(result.attributes).to include( host: 'stash.example.com', repository_namespace: 'myproject', repository_name: 'myrepo' ) end it 'parses ssh URLs with an explicit port' do result = make_server \ "ssh://git@stash.example.com:7999/myproject/myrepo.git" expect(result.attributes).to include( host: 'stash.example.com', repository_namespace: 'myproject', repository_name: 'myrepo', port: '7999' ) end it 'should allow periods, hyphens, and underscores in repository names' do result = make_server("git@stash.example.com:angular/an-gu_lar.js.git") expect(result.attributes[:repository_name]).to eq('an-gu_lar.js') result = make_server("ssh://git@stash.example.com/angular/an-gu_lar.js.git") expect(result.attributes[:repository_name]).to eq('an-gu_lar.js') result = make_server("https://stash.example.com/scm/angular/an-gu_lar.js.git") expect(result.attributes[:repository_name]).to eq('an-gu_lar.js') end it 'should not allow characters disallowed by Github in repository names' do %w(! @ # $ % ^ & * ( ) = + \ | ` ~ [ ] { } : ; ' " ? 
/).each do |symbol| expect { make_server("git@stash.example.com:angular/bad#{symbol}name.git") }.to raise_error(RemoteServer::UnknownUrlFormat) expect { make_server("ssh://git@stash.example.com/angular/bad#{symbol}name.git") }.to raise_error(RemoteServer::UnknownUrlFormat) expect { make_server("https://stash.example.com/scm/angular/bad#{symbol}name.git") }.to raise_error(RemoteServer::UnknownUrlFormat) end end end describe "#canonical_repository_url" do it 'should return a https url when given a ssh url' do ssh_url = "ssh://git@stash.example.com:7999/foo/bar.git" result = make_server(ssh_url).canonical_repository_url expect(result).to eq("https://stash.example.com/scm/foo/bar.git") end it 'should do nothing when given a https url' do https_url = "https://stash.example.com/scm/foo/bar.git" result = make_server(https_url).canonical_repository_url expect(result).to eq(https_url) end end describe "#base_api_url" do it 'should use the primary host name' do https_url = "https://git-alias.example.com/scm/foo/bar.git" result = make_server(https_url).base_api_url expect(result).to eq('https://stash.example.com/rest/api/1.0/projects/foo/repos/bar') end end describe "#base_html_url" do it 'should use the primary host name' do https_url = "https://git-alias.example.com/scm/foo/bar.git" result = make_server(https_url).base_html_url expect(result).to eq('https://stash.example.com/projects/FOO/repos/bar') end end describe "#merge" do it 'uses stash API' do https_url = "https://stash.example.com/scm/foo/bar.git" server = make_server(https_url) allow(server).to receive(:get_pr_id_and_version).and_return([1, 5]) allow(server).to receive(:can_merge?).and_return(true) allow(server).to receive(:perform_merge).and_return(true) expect(server).to receive(:get_pr_id_and_version).once expect(server).to receive(:can_merge?).once expect(server).to receive(:perform_merge).once expect { server.merge("abranch") }.to_not raise_error end end describe '#get_branch_url' do it 'should return the 
expected url' do https_url = "https://stash.example.com/scm/foo/bar.git" result = make_server(https_url).get_branch_url('my-new-branch') expect(result).to eq("https://stash.example.com/projects/FOO/repos/bar?at=refs/heads/my-new-branch") end end describe '#open_pull_request_url' do it 'should return the expected url' do https_url = "https://stash.example.com/scm/foo/bar.git" result = make_server(https_url).open_pull_request_url('my-new-branch') expect(result).to eq("https://stash.example.com/projects/FOO/repos/bar/compare/commits?sourceBranch=refs/heads/my-new-branch") end end describe "#head_commit" do let(:https_url) { "https://stash.example.com/scm/foo/bar.git" } let(:server) { make_server(https_url) } let(:stash_request) { server.stash_request } it "should not raise errors" do allow(server).to receive(:get_pr_id_and_version).and_return([1, 5]) allow(stash_request).to receive(:get).and_return({"values" => [{"id" => "3" * 40}]}.to_json) expect(server).to receive(:get_pr_id_and_version).once expect { server.head_commit("a/branch") }.to_not raise_error end end end ================================================ FILE: spec/lib/remote_server_spec.rb ================================================ require 'spec_helper' require 'remote_server' require 'remote_server/github' require 'remote_server/stash' shared_examples_for 'a remote server' do describe "#sha_for_branch" do let(:url) { good_url } let(:repo_uri) { remote_server.base_api_url } let(:branch) { "test/branch" } let(:branch_head_sha) { "4b41fe773057b2f1e2063eb94814d32699a34541" } let(:subject) { remote_server.sha_for_branch(branch) } it "returns the HEAD SHA for the branch" do expect(subject).to eq(branch_head_sha) end context "with a non-existent repo" do let(:url) { bad_url } before do stub_request(:get, "#{repo_uri}/git/refs/heads/#{branch}").to_return(:status => 404, :body => '{ "message": "Not Found" }') stub_request(:get, 
"https://stash.example.com/rest/api/1.0/projects/sq/repos/non-existent-repo/commits?limit=1&until=#{branch}") .with(basic_auth: ['stashuser', 'stashpassword']) .to_return(:status => 404, :body => '{ "errors": [ { "context": null, "message": "A detailed error message.", "exceptionName": null } ] }') end it "raises RepositoryDoesNotExist" do expect{ subject }.to raise_error(RemoteServer::RefDoesNotExist) end end context "with a non-existent branch" do let(:branch) { "nonexistant-branch" } before do stub_request(:get, "#{repo_uri}/git/refs/heads/#{branch}").to_return(:status => 404, :body => '{ "message": "Not Found" }') stub_request(:get, "https://@stash.example.com/rest/api/1.0/projects/sq/repos/kochiku/commits?limit=1&until=#{branch}") .with(basic_auth: ['stashuser', 'stashpassword']) .to_return(:status => 400, :body => '{ "errors": [ { "context": null, "message": "A detailed error message.", "exceptionName": null } ] }') end it "raises BranchDoesNotExist" do expect{ subject }.to raise_error(RemoteServer::RefDoesNotExist) end end end end describe 'RemoteServer::GitHub' do let(:good_url) { 'git@git.example.com:square/kochiku.git' } let(:bad_url) { 'git@git.example.com:square/non-existent-repo.git' } before do settings = SettingsAccessor.new(<<-YAML) git_servers: git.example.com: type: github YAML stub_const "Settings", settings build_ref_info = <<-RESPONSE { "ref": "refs/heads/#{branch}", "url": "#{repo_uri}/git/refs/heads/#{branch}", "object": { "sha": "#{branch_head_sha}", "type": "commit", "url": "#{repo_uri}/git/commits/#{branch_head_sha}" } } RESPONSE stub_request(:get, "#{repo_uri}/git/refs/heads/#{branch}").to_return(:status => 200, :body => build_ref_info) end it_behaves_like 'a remote server' do let(:remote_server) { RemoteServer::Github.new(url, Settings.git_server(url)) } end end describe 'RemoteServer::Stash' do let(:good_url) { 'ssh://git@stash.example.com/sq/kochiku.git' } let(:bad_url) { 'ssh://git@stash.example.com/sq/non-existent-repo.git' } before 
do settings = SettingsAccessor.new(<<-YAML) git_servers: stash.example.com: type: stash username: stashuser password_file: /password YAML stub_const "Settings", settings build_ref_info = <<-RESPONSE { "size": 3, "limit": 3, "isLastPage": false, "values": [ { "id": "#{branch_head_sha}" } ], "start": 0, "filter": null, "nextPageStart": 3 } RESPONSE allow(File).to receive(:read).with("/password").and_return("stashpassword") stub_request(:get, "https://stash.example.com/rest/api/1.0/projects/sq/repos/kochiku/commits?limit=1&until=#{branch}") .with(basic_auth: ['stashuser', 'stashpassword']) .to_return(:status => 200, :body => build_ref_info) end it_behaves_like 'a remote server' do let(:remote_server) { RemoteServer::Stash.new(url, Settings.git_server(url)) } end end describe 'valid_git_host?' do before do settings = SettingsAccessor.new(<<-YAML) git_servers: git.example.com: type: github YAML stub_const "Settings", settings end it 'returns true for known git hosts' do known_git_host = 'git.example.com' expect(RemoteServer.valid_git_host?(known_git_host)).to be_truthy end it 'returns false for unknown git hosts' do unknown_git_host = 'example.com' expect(RemoteServer.valid_git_host?(unknown_git_host)).to be_falsey end end ================================================ FILE: spec/lib/server_settings_spec.rb ================================================ require 'spec_helper' RSpec.describe ServerSettings do it "should be able to access the common settings" do options = { :type => 'github', :mirror => 'git://git-mirror.example.com/', :aliases => ['alias1.example.com', 'alias2.example.com'], } settings = ServerSettings.new(options, 'git.example.com') expect(settings.host).to eq('git.example.com') expect(settings.type).to eq('github') expect(settings.mirror).to eq('git://git-mirror.example.com/') expect(settings.aliases).to eq(['alias1.example.com', 'alias2.example.com']) end context "github settings" do describe "oauth token" do it "should read the file and expose the 
token" do allow(File).to receive(:read).with('/secrets/github_oauth_token').and_return("oauth_token_for_test\n") settings = ServerSettings.new({ oauth_token_file: '/secrets/github_oauth_token' }, 'git.example.com') expect(settings.oauth_token) .to eq('oauth_token_for_test') end end end context "stash settings" do it 'should work' do allow(File).to receive(:read).with('/secrets/stash').and_return("some_password\n") options = { :type => 'stash', :username => 'kochiku', :password_file => '/secrets/stash', } settings = ServerSettings.new(options, 'stash.example.com') expect(settings.type).to eq('stash') expect(settings.stash_username).to eq('kochiku') expect(settings.stash_password).to eq('some_password') end describe "stash password file" do before do File.open(File.join(RSpec.configuration.fixture_path, "stash-pass.txt"), 'w') { |f| f.write("fake-stash-password") } end after do File.unlink(File.join(RSpec.configuration.fixture_path, "stash-pass.txt")) end it 'will work with a relative path' do settings = ServerSettings.new({ password_file: 'spec/fixtures/stash-pass.txt' }, 'stash.example.com') expect(settings.stash_password).to eq("fake-stash-password") end end end end ================================================ FILE: spec/lib/settings_accessor_spec.rb ================================================ require 'settings_accessor' describe SettingsAccessor do describe 'kochiku_protocol' do it 'returns https when use_https is truthy' do settings = SettingsAccessor.new("use_https: true") expect(settings.kochiku_protocol).to eq("https") end it 'returns https when use_https is false' do settings = SettingsAccessor.new("use_https: false") expect(settings.kochiku_protocol).to eq("http") end it 'returns https when use_https is not present' do settings = SettingsAccessor.new("blah: blah") expect(settings.kochiku_protocol).to eq("http") end end it "should support multiple git servers" do settings = SettingsAccessor.new(<<-YAML) git_servers: stash.example.com: type: stash 
username: robot github.com: type: github github-enterprise.example.com: type: github mirror: 'git://git-mirror.example.com/' YAML expect(settings.git_servers.keys) .to match_array(%w{stash.example.com github.com github-enterprise.example.com}) expect(settings.git_servers['stash.example.com'].type). to eq('stash') expect(settings.git_servers['github-enterprise.example.com'].type).to eq('github') end it "can look up git servers" do settings = SettingsAccessor.new(<<-YAML) git_servers: stash.example.com: type: stash github.com: type: github YAML expect(settings.git_server('git@stash.example.com:square/kochiku.git').type).to eq('stash') expect(settings.git_server('https://github.com/square/kochiku.git').type).to eq('github') expect(settings.git_server('https://foobar.com/square/kochiku.git')).to eq(nil) end it "can look up git servers via alias" do settings = SettingsAccessor.new(<<-YAML) git_servers: stash.example.com: type: stash aliases: - stash-alias.example.com - other-stash-alias.example.com github.com: type: github YAML expect(settings.git_server('git@stash-alias.example.com:square/kochiku.git').host).to eq('stash.example.com') expect(settings.git_server('git@other-stash-alias.example.com:square/kochiku.git').host).to eq('stash.example.com') expect(settings.git_server('git@not-an-alias.example.com:square/kochiku.git')).to eq(nil) end it "can also give me the host which matched" do settings = SettingsAccessor.new(<<-YAML) git_servers: github.com: type: github YAML expect(settings.git_server('https://github.com/square/kochiku.git').host).to eq('github.com') end it "still works if git_servers is not in the config file" do settings = SettingsAccessor.new("another_setting:\n") expect(settings.git_server('https://github.com/square/kochiku.git')).to eq(nil) end it "still works if a host is listed without any data" do settings = SettingsAccessor.new(<<-YAML) git_servers: git.example.com: YAML expect(settings.git_server('git@git.example.com:square/kochiku.git').type).to 
eq(nil) end end ================================================ FILE: spec/lib/stash_merge_executor_spec.rb ================================================ require 'spec_helper' require 'stash_merge_executor' describe StashMergeExecutor do before do settings = SettingsAccessor.new(<<-YAML) git_servers: github.com: type: github stash.example.com: type: stash YAML stub_const "Settings", settings end let(:repository) { FactoryBot.create(:stash_repository) } let(:branch) { FactoryBot.create(:branch, repository: repository, name: 'funyuns') } let(:stash_build) { FactoryBot.create(:build, branch_record: branch) } let(:stash_merger) { described_class.new(stash_build) } context "Using stash repository" do subject { stash_merger.merge_and_push } before do allow_any_instance_of(RemoteServer::Stash).to receive(:head_commit).and_return([1, 5]) end it "should use stash REST api" do expect(stash_build.repository.remote_server).to receive(:merge).once allow(stash_build.repository.remote_server).to receive(:merge).and_return(true) expect(subject).to eq(merge_commit: [1, 5], log_output: "Successfully merged funyuns") end it "should use throw exception if stash api refuses merge" do expect(stash_build.repository.remote_server).to receive(:merge).once allow(stash_build.repository.remote_server).to receive(:merge).and_return(false) expect { subject }.to raise_error(StashMergeExecutor::GitMergeFailedError) end context "for a build on a convergence branch" do let(:branch) { FactoryBot.create(:convergence_branch, repository: repository) } it "should raise an exception" do expect { subject }.to raise_error(/ineligible for merge/) end end end describe "#delete" do context "Using stash repository" do it "should use stash REST api" do expect(stash_build.repository.remote_server).to receive(:delete_branch).once allow(stash_build.repository.remote_server).to receive(:delete_branch).and_return(true) expect { stash_merger.delete_branch }.to_not raise_error end end end end 
================================================ FILE: spec/mailers/build_mailer_spec.rb ================================================ require 'spec_helper' require 'partitioner' describe BuildMailer do describe "#error_email" do before do allow(Settings).to receive(:sender_email_address).and_return('kochiku@example.com') allow(Settings).to receive(:kochiku_notifications_email_address).and_return('notify@example.com') end it "sends the email" do build_attempt = FactoryBot.build(:build_attempt, :state => 'errored', :builder => "test-builder") email = BuildMailer.error_email(build_attempt, "error text") expect(email.to).to include('notify@example.com') expect(email.from).to eq(['kochiku@example.com']) expect(email.html_part.body).to include("test-builder") expect(email.text_part.body).to include("test-builder") expect(email.html_part.body).to include("http://") expect(email.text_part.body).to include("http://") expect(email.html_part.body).to include("error text") expect(email.text_part.body).to include("error text") end end describe "#build_break_email" do let(:repository) { FactoryBot.create(:repository) } let(:branch) { FactoryBot.create(:branch, repository: repository, name: 'funyuns') } let(:build) { FactoryBot.create(:build, branch_record: branch) } before do partitioner = instance_double('Partitioner::Base') allow(partitioner).to receive(:emails_for_commits_causing_failures).and_return({}) allow(Partitioner).to receive(:for_build).and_return(partitioner) build_part = build.build_parts.create!(:paths => ["a", "b"], :kind => "cucumber", :queue => 'ci') @build_attempt = build_part.build_attempts.create!(:state => 'failed', :builder => "test-builder") FactoryBot.create(:stdout_build_artifact, build_attempt: @build_attempt) end context "on a convergence branch" do before do branch.update_attribute(:convergence, true) allow(GitBlame).to receive(:changes_since_last_green).and_return([{:hash => "sha", :author => "Joe", :date => "some day", :message => "always be 
shipping it"}]) allow(GitBlame).to receive(:emails_since_last_green).and_return(["foo@example.com"]) end it "sends the email" do expect(build.branch_record.convergence?).to be(true) email = BuildMailer.build_break_email(build) expect(email.to).to eq(["foo@example.com"]) expect(email.html_part.body).to include(build.branch_record.name) expect(email.text_part.body).to include(build.branch_record.name) expect(email.html_part.body).to include("http://") expect(email.text_part.body).to include("http://") end end context "on a non-convergence branch" do before do expect(build.branch_record.convergence?).to be(false) allow(GitBlame).to receive(:changes_in_branch).and_return([{:hash => "sha", :author => "Joe", :date => "some day", :message => "always be shipping it"}]) allow(GitBlame).to receive(:emails_in_branch).and_return(["foo@example.com"]) end it "sends the email" do email = BuildMailer.build_break_email(build) expect(email.to).to eq(["foo@example.com"]) expect(email.html_part.body).to include(build.branch_record.name) expect(email.text_part.body).to include(build.branch_record.name) expect(email.html_part.body).to include("http://") expect(email.text_part.body).to include("http://") end end context "with emails from a partitioner" do before do partitioner = instance_double('Partitioner::Base') allow(partitioner).to receive(:emails_for_commits_causing_failures).and_return({'foo@example.com' => ['sha']}) allow(Partitioner).to receive(:for_build).and_return(partitioner) allow(GitBlame).to receive(:changes_since_last_green).and_return([{:hash => "sha", :author => "Foo", :date => "some day", :message => "does this work? 
LOL"}]) end it "uses those emails" do email = BuildMailer.build_break_email(build) expect(email.to).to eq(["foo@example.com"]) expect(email.html_part.body).to include(build.branch_record.name) expect(email.text_part.body).to include(build.branch_record.name) expect(email.html_part.body).to include("http://") expect(email.text_part.body).to include("http://") expect(email.html_part.body).to_not include("pull-requests/") end context "when the build is tied to an open pull request on Stash" do before do allow(build.repository.remote_server).to receive(:class).and_return(RemoteServer::Stash) allow(build.repository.remote_server).to receive(:get_pr_id_and_version).and_return(3, 4) end it "includes link to PR" do build_part = build.build_parts.create!(:paths => ["a", "b"], :kind => "cucumber", :queue => 'ci') build_part.build_attempts.create!(:state => 'passed', :builder => "test-builder") email = BuildMailer.build_break_email(build) expect(email.html_part.body).to include("pull-requests/3/overview") end end end describe 'failed build part information' do context 'stdout log file has been uploaded' do it 'should link to the log file' do stdout_artifact = @build_attempt.build_artifacts.stdout_log.first email = BuildMailer.build_break_email(build) expect(email.html_part.body).to include(build_artifact_url(stdout_artifact)) end end context 'stdout log file has not been uploaded yet' do before do @build_attempt.build_artifacts.delete_all end it 'should not link to the log file' do email = BuildMailer.build_break_email(build) expect(email.html_part.body).to_not include("/build_artifacts/") end end end end describe '#build_success_email' do let(:repository) { FactoryBot.create(:repository) } let(:branch) { FactoryBot.create(:branch, repository: repository, name: 'funyuns') } let(:build) { FactoryBot.create(:build, branch_record: branch) } before do allow(GitBlame).to receive(:changes_in_branch).and_return([{hash: "sha", author: "Joe", date: "some day", message: "always be 
shipping it"}]) allow(GitBlame).to receive(:last_email_in_branch).and_return(["foo@example.com"]) build_part = build.build_parts.create!(paths: ["a", "b"], kind: "cucumber", queue: 'ci') build_part.build_attempts.create!(state: 'passed', builder: "test-builder") end it "sends an email" do email = BuildMailer.build_success_email(build) expect(email.to).to eq(["foo@example.com"]) expect(email.html_part.body).to include(repository.name) expect(email.text_part.body).to include(repository.name) expect(email.html_part.body).to include("http://") expect(email.text_part.body).to include("http://") end context "stash repository" do let(:repository) { FactoryBot.create(:stash_repository) } let(:branch) { FactoryBot.create(:branch, repository: repository, name: 'funyuns') } let(:build) { FactoryBot.create(:build, branch_record: branch) } context "build has an open pull request" do before do allow(build.repository.remote_server).to receive(:class).and_return(RemoteServer::Stash) allow(build.repository.remote_server).to receive(:get_pr_id_and_version).and_return(3, 4) end it "includes link to PR" do email = BuildMailer.build_success_email(build) expect(email.html_part.body).to include("pull-requests/3/overview") end end context "build does not have a pull request" do before do allow(build.repository.remote_server).to receive(:class).and_return(RemoteServer::Stash) allow(build.repository.remote_server).to receive(:get_pr_id_and_version).and_raise(RemoteServer::StashAPIError) end it "does not link to a pull request" do email = BuildMailer.build_success_email(build) expect(email.html_part.body).to_not include("pull-requests/") end end end end end ================================================ FILE: spec/mailers/merge_mailer_spec.rb ================================================ require "spec_helper" describe MergeMailer do describe "merge_successful" do it "sends the email" do email = MergeMailer.merge_successful(FactoryBot.create(:build), to_40('w'), ["foo@example.com"], 
'deploy log') expect(email.to).to include("foo@example.com") end end describe "merge_failed" do it "sends the email" do email = MergeMailer.merge_failed(FactoryBot.create(:build), ["foo@example.com"], 'deploy log') expect(email.to).to include("foo@example.com") end end end ================================================ FILE: spec/mailers/previews/build_mailer_preview.rb ================================================ require 'git_blame' require 'partitioner' # Stub GitBlame behavior for mailer preview class GitBlame class << self def emails_since_last_green(build) ['test@example.com'] end def emails_in_branch(build) ['test@example.com'] end def last_email_in_branch(build) ['test@example.com'] end def changes_since_last_green(build) [{:hash => "fed2d1188b2005eea51b3d87f819b2ebbdbeb67a", :author => "Joshua Eversmann ", :date => "Wed Jul 2 15:13:31 2014 -0700", :message => "Run success script built back in, Thread checks removed from repo logic"}, {:hash => "a556abc734b7fc284436270bce5c52a409b58fb8", :author => "Joshua Eversmann ", :date => "Mon Jun 30 17:22:12 2014 -0700", :message => "This is a\n multi line commit message\n so ha."}, {:hash => "7abb9c3194ed1793e78a2928c793d9f4172afab2", :author => "Joshua Eversmann ", :date => "Mon Jun 30 17:22:12 2014 -0700", :message => "Put file reading logic into GitRepo"}] end def changes_in_branch(build) [{:hash => "fed2d1188b2005eea51b3d87f819b2ebbdbeb67a", :author => "Joshua Eversmann ", :date => "Wed Jul 2 15:13:31 2014 -0700", :message => "Run success script built back in, Thread checks removed from repo logic"}, {:hash => "a556abc734b7fc284436270bce5c52a409b58fb8", :author => "Joshua Eversmann ", :date => "Mon Jun 30 17:22:12 2014 -0700", :message => "This is a\n multi line commit message\n so ha."}, {:hash => "7abb9c3194ed1793e78a2928c793d9f4172afab2", :author => "Joshua Eversmann ", :date => "Mon Jun 30 17:22:12 2014 -0700", :message => "Put file reading logic into GitRepo"}] end end end # Stub Partitioner behavior 
for mailer preview module Partitioner def self.for_build(build) Base.new(build, nil) end class Base def initialize(build, kochiku_yml) @build = build @kochiku_yml = kochiku_yml end def emails_for_commits_causing_failures {} end end end class BuildMailerPreview < ActionMailer::Preview def build_break_email BuildMailer.build_break_email(Build.where(:state => 'errored').first) end def build_success_email BuildMailer.build_success_email(Build.where(:state => 'succeeded').first) end end ================================================ FILE: spec/models/branch_spec.rb ================================================ require 'spec_helper' RSpec.describe Branch, type: :model do it 'should fail on nil name' do expect(FactoryBot.build(:branch, name: nil).valid?).to be false end it 'should fail on empty name' do expect(FactoryBot.build(:branch, name: "").valid?).to be false end describe '#abort_in_progress_builds_behind_build' do let(:branch) { FactoryBot.create(:branch) } it 'aborts non-finished builds for a branch' do build1 = branch.builds.create(state: 'succeeded', ref: to_40('1')) build2 = branch.builds.create(state: 'running', ref: to_40('2')) build3 = branch.builds.create(state: 'partitioning', ref: to_40('3')) build4 = branch.builds.create(state: 'partitioning', ref: to_40('4')) build5 = branch.builds.create(state: 'partitioning', ref: to_40('5')) branch.abort_in_progress_builds_behind_build(build4) expect(build1.reload.state).to eq('succeeded') expect(build2.reload.state).to eq('aborted') expect(build3.reload.state).to eq('aborted') expect(build4.reload.state).to eq('partitioning') expect(build5.reload.state).to eq('partitioning') end end describe "#last_completed_build" do let(:branch) { FactoryBot.create(:branch) } subject { branch.last_completed_build } it "should return the most recent build in a completed state" do FactoryBot.create(:build, :branch_record => branch, :state => 'running') FactoryBot.create(:build, :branch_record => branch, :state => 'succeeded') 
expected = FactoryBot.create(:build, :branch_record => branch, :state => 'errored') FactoryBot.create(:build, :branch_record => branch, :state => 'partitioning') should == expected end end describe '#timing_data_for_recent_builds' do subject { branch.timing_data_for_recent_builds.to_a } let(:branch) { FactoryBot.create(:branch) } context 'when the branch has never been built' do it { should == [] } end context 'when the branch has one build' do let!(:build) { FactoryBot.create(:build, :branch_record => branch, :state => 'succeeded') } context 'when the build has one part' do let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :kind => 'spec') } context 'when the part has zero attempts' do it 'still includes the build' do should == [[ 'spec', build.ref[0, 5], 0, 0, 0, build.id, 'succeeded', build.created_at.to_s ]] end end context 'when the part has an unstarted attempt' do let!(:build_attempt) do FactoryBot.create( :build_attempt, :build_part => build_part, :state => 'runnable' ) end it 'still includes the build' do build_attempt.finish!('running') should == [[ 'spec', build.ref[0, 5], 0, 0, 0, build.id, 'running', build.created_at.to_s ]] end end context 'when the part has one attempt' do let!(:build_attempt) do FactoryBot.create( :build_attempt, :build_part => build_part, :started_at => 12.minutes.ago, :finished_at => 7.minutes.ago, :state => 'passed' ) end it 'shows error bars, ref, and build status' do should == [[ 'spec', build.ref[0, 5], (build_attempt.elapsed_time / 60).round, 0, 0, build.id, 'succeeded', build.created_at.to_s ]] end end end end end end ================================================ FILE: spec/models/build_artifact_spec.rb ================================================ require 'spec_helper' describe BuildArtifact do it "should validate presence of log_file" do expect(BuildArtifact.new).not_to be_valid ba = BuildArtifact.new ba.log_file = File.open(FIXTURE_PATH.join('build_artifact.log')) expect(ba).to be_valid 
end describe "stdout_log scope" do let!(:artifact) { FactoryBot.create :build_artifact } let!(:stdout_artifact) { FactoryBot.create :build_artifact, :log_file => File.open(FIXTURE_PATH + 'stdout.log.gz') } subject { BuildArtifact.stdout_log } it "should return artifacts that match stdout.log" do should_not include(artifact) should include(stdout_artifact) end end end ================================================ FILE: spec/models/build_attempt_spec.rb ================================================ require 'spec_helper' describe BuildAttempt do it "requires a valid state" do build_attempt = BuildAttempt.new(:state => "asasdfsdf") expect(build_attempt).not_to be_valid expect(build_attempt).to have(1).error_on(:state) build_attempt.state = 'runnable' expect(build_attempt).to be_valid end describe "finish!" do let(:build) { FactoryBot.create(:build, :state => 'runnable', :merge_on_success => true) } let(:build_part) { FactoryBot.create(:build_part, :build_instance => build, retry_count: 2) } let!(:build_attempt) { FactoryBot.create(:build_attempt, :state => 'running', :build_part => build_part) } context "build auto-retries" do it "requests a rebuild if should_reattempt? is true" do allow(build_part).to receive(:should_reattempt?).and_return(true) expect(build_part).to receive(:rebuild!) build_attempt.finish!('failed') end it "does not request a rebuild if should_reattempt? is false" do allow(build_part).to receive(:should_reattempt?).and_return(false) expect(build_part).to_not receive(:rebuild!) build_attempt.finish!('failed') end end it "calls update_state_from_parts!" 
do expect(build).to receive(:update_state_from_parts!).at_least(:once) build_attempt.finish!('passed') end it "sends an email for an errored build" do expect(BuildMailer).to receive(:error_email).and_return(OpenStruct.new(:deliver => nil)) allow(build_attempt).to receive(:should_reattempt?).and_return(false) build_attempt.finish!('errored') end end end ================================================ FILE: spec/models/build_part_spec.rb ================================================ require 'spec_helper' describe BuildPart do let(:repository) { FactoryBot.create(:repository) } let(:branch) { FactoryBot.create(:branch, :repository => repository) } let(:build) { FactoryBot.create(:build, branch_record: branch, state: 'runnable', created_at: 5.minutes.ago, updated_at: 5.minutes.ago) } let(:build_part) { FactoryBot.create(:build_part, :paths => ["a", "b"], :kind => "spec", :build_instance => build, :queue => 'ci') } before do allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil) end describe "#create_and_enqueue_new_build_attempt!" do it "should create a new build attempt" do expect { build_part.create_and_enqueue_new_build_attempt! }.to change(build_part.build_attempts, :count).by(1) end it "enqueues onto the queue specified in the build part" do build_part.update_attribute(:queue, 'queueX') expect(BuildAttemptJob).to receive(:enqueue_on).once do |queue, arg_hash| expect(queue).to eq("queueX") end build_part.create_and_enqueue_new_build_attempt! end it "bumps updated_at value on the build record" do # if build.updated_at is not changed then the view caches will be stale original_updated_at = build.updated_at.to_i build_part.create_and_enqueue_new_build_attempt! 
expect(build.reload.updated_at.to_i).to_not eq(original_updated_at) end it "should enqueue the build attempt for building" do build_part.update_attributes!(:options => {"ruby" => "ree"}) expect(BuildAttemptJob).to receive(:enqueue_on).once do |queue, arg_hash| expect(queue).to eq("ci") expect(arg_hash["build_attempt_id"]).not_to be_blank expect(arg_hash["build_ref"]).not_to be_blank expect(arg_hash["build_kind"]).not_to be_blank expect(arg_hash["test_files"]).not_to be_blank expect(arg_hash["repo_name"]).not_to be_blank expect(arg_hash["test_command"]).not_to be_blank expect(arg_hash["repo_url"]).not_to be_blank expect(arg_hash["options"]).to eq({"ruby" => "ree"}) expect(arg_hash["kochiku_env"]).to eq("test") end build_part.create_and_enqueue_new_build_attempt! end end describe "#job_args" do let(:repository) { FactoryBot.create(:repository, :url => "git@github.com:org/test-repo.git") } context "with a git mirror specified" do before do settings = SettingsAccessor.new(<<-YAML) git_servers: github.com: type: github mirror: "git://git-mirror.example.com/" YAML stub_const "Settings", settings end it "should substitute the mirror" do build_attempt = build_part.build_attempts.create!(:state => 'runnable') args = build_part.job_args(build_attempt) expect(args["repo_url"]).to eq("git://git-mirror.example.com/org/test-repo.git") end end context "with no git mirror specified" do before do settings = SettingsAccessor.new(<<-YAML) git_servers: github.com: type: github YAML stub_const "Settings", settings end it "should return the original git url" do build_attempt = build_part.build_attempts.create!(:state => 'runnable') args = build_part.job_args(build_attempt) expect(args["repo_url"]).to eq(repository.url) end end end describe "#unsuccessful?" do subject { build_part.unsuccessful? 
} context "with all successful attempts" do before do 2.times { FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed') } end it { should be false } end context "with one successful attempt" do before { 2.times { FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'failed') } FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed') } it { should be false } end context "with all unsuccessful attempts" do before do 2.times { FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'failed') } end it { should be true } end end describe "#status" do subject { build_part.status } context "with all successful attempts" do before do 2.times { FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed') } end it { should == 'passed' } end context "with one successful attempt" do before do FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'failed') FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed') FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'failed') end it { should == 'passed' } end context "with no successful attempts" do before do FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'failed') FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'running') end it { should == 'running' } end end describe "#not_finished?" do subject { build_part.not_finished? } context "when not finished" do it { should be true } end context "when finished" do before { FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed', :finished_at => Time.current) } it { should be false } end end describe "#should_reattempt?" 
do let(:build_part) { FactoryBot.create(:build_part, retry_count: 1, build_instance: build) } it "should reattempt" do expect(build_part.should_reattempt?).to be true end context "when we have already hit the retry count" do before do FactoryBot.create(:build_attempt, build_part: build_part, state: 'failed') FactoryBot.create(:build_attempt, build_part: build_part, state: 'failed') end it "will not reattempt" do expect(build_part.should_reattempt?).to be false end end context "when we are just one away from the retry count" do before do FactoryBot.create(:build_attempt, build_part: build_part, state: 'failed') end it "will reattempt" do expect(build_part.should_reattempt?).to be true end end context "when it fails very fast" do let(:build_part) { FactoryBot.create(:build_part, retry_count: 0, build_instance: build) } before do FactoryBot.create(:build_attempt, build_part: build_part, state: 'errored', started_at: 10.seconds.ago, finished_at: Time.current) end it "will reattempt" do expect(build_part.should_reattempt?).to be true end end context "after 5 failures" do let(:build_part) { FactoryBot.create(:build_part, retry_count: 0, build_instance: build) } before do 5.times do FactoryBot.create(:build_attempt, build_part: build_part, state: 'errored', started_at: 10.seconds.ago, finished_at: Time.current) end end it "not will reattempt" do expect(build_part.should_reattempt?).to be false end end context "when it fails after a longer time" do let(:build_part) { FactoryBot.create(:build_part, retry_count: 0, build_instance: build) } before do FactoryBot.create(:build_attempt, build_part: build_part, state: 'errored', started_at: 70.seconds.ago, finished_at: Time.current) end it "shouldn't reattempt" do expect(build_part.should_reattempt?).to be false end end context "when there has already been a successful attempt" do before do FactoryBot.create(:build_attempt, build_part: build_part, state: 'passed') FactoryBot.create(:build_attempt, build_part: build_part, state: 
'failed') end it "will not reattempt" do expect(build_part.should_reattempt?).to be false end end end describe "#as_json" do subject(:json) { build_part.as_json['build_part'].with_indifferent_access } context "with a build attempt" do before do FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed') end it "includes synthetic attributes like status" do expect(json[:status]).to eq('passed') end end end end ================================================ FILE: spec/models/build_spec.rb ================================================ require 'spec_helper' describe Build do let(:branch) { FactoryBot.create(:branch) } let(:build) { FactoryBot.create(:build, branch_record: branch) } let(:parts) { [{'type' => 'cucumber', 'files' => ['a', 'b'], 'queue' => 'ci'}, {'type' => 'rspec', 'files' => ['c', 'd'], 'queue' => 'ci'}] } before do allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil) end describe "validations" do it "requires a ref to be set" do build.ref = nil expect(build).not_to be_valid expect(build).to have(1).error_on(:ref) end it "requires a branch_id to be set" do build.branch_id = nil expect(build).not_to be_valid expect(build).to have(1).error_on(:branch_id) end it "should force uniqueness on ref" do build2 = FactoryBot.build(:build, branch_record: branch, ref: build.ref) expect(build2).not_to be_valid expect(build2).to have(1).error_on(:ref) end end describe '#kochiku_yml' do it 'only tries to load once if it fails' do expect(GitRepo).to receive(:load_kochiku_yml).once 5.times do build.kochiku_yml end end end describe "#partition" do it "should create a BuildPart for each path" do build.partition(parts) expect(build.build_parts.map(&:kind)).to match_array(['cucumber', 'rspec']) expect(build.build_parts.map(&:queue)).to match_array(['ci', 'ci']) expect(build.build_parts.find_by_kind('cucumber').paths).to match_array(['a', 'b']) end it "should change state to runnable" do expect { build.partition(parts) }.to change(build, 
:state).from('partitioning').to('runnable') end it "creates parts with options" do build.partition([{"type" => "cucumber", "files" => ['a'], 'queue' => 'developer', 'options' => {"ruby" => "ree"}}]) build_part = build.build_parts.first build_part.reload expect(build_part.options).to eq({"ruby" => "ree"}) end it "should set the queue" do build.partition([{"type" => "cucumber", "files" => ['a'], 'queue' => 'developer'}]) build_part = build.build_parts.first expect(build_part.queue).to eq('developer') end it "should set the retry_count" do build.partition([{"type" => "cucumber", "files" => ['a'], 'queue' => 'developer', 'retry_count' => 3}]) build_part = build.build_parts.first expect(build_part.retry_count).to eq(3) end it "should create build attempts for each build part" do build.partition(parts) build.build_parts.all? { |bp| expect(bp.build_attempts).to have(1).item } end it "should enqueue build part jobs if repository is enabled" do expect(BuildAttemptJob).to receive(:enqueue_on).twice build.partition(parts) end it "should not enqueue build part jobs if repository is disabled" do build2 = FactoryBot.create(:build_on_disabled_repo) build2.partition(parts) expect(BuildAttemptJob).to receive(:enqueue_on).exactly(0).times expect(build2.build_parts.reload).to be_empty end it "rolls back any changes to the database if an error occurs" do # set parts to an illegal value parts = [{'type' => 'rspec', 'files' => [], 'queue' => 'ci'}] expect(build.build_parts).to be_empty expect(build.state).to eq('partitioning') expect { build.partition(parts) }.to raise_error(ActiveRecord::ActiveRecordError) expect(build.build_parts.reload).to be_empty expect(build.state).to eq('runnable') end end describe "#completed?" 
do Build::TERMINAL_STATES.each do |state| it "should be true for #{state}" do build.state = state expect(build).to be_completed end end (Build::STATES - Build::TERMINAL_STATES).each do |state| it "should be false for #{state}" do build.state = state expect(build).not_to be_completed end end end describe "#update_state_from_parts!" do let(:build) { FactoryBot.create(:build, branch_record: branch, :state => 'running') } let!(:build_part_1) { FactoryBot.create(:build_part, :build_instance => build) } let!(:build_part_2) { FactoryBot.create(:build_part, :build_instance => build) } it "should set a build state to running if it is successful so far, but still incomplete" do FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed') FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running') build.update_state_from_parts! expect(build.state).to eq('running') end it "should set build state to errored if any of its parts errored" do FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'errored') FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'passed') build.update_state_from_parts! expect(build.state).to eq('errored') end it "should set build state to succeeded if all of its parts passed" do FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed') FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'passed') build.update_state_from_parts! expect(build.state).to eq('succeeded') end it "should set a build state to doomed if it has a failed part but is still has more parts to process" do FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'failed') build.update_state_from_parts! 
expect(build.state).to eq('doomed') end it "should change a doomed build to failed once it is complete" do ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'failed') ba2 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running') build.update_state_from_parts! expect(build.state).to eq('doomed') ba2.update!(state: 'passed') build.update_state_from_parts! expect(build.state).to eq('failed') end it "should set build_state to running when a failed attempt is retried" do ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed') ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'failed') ba2_2 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running') build.update_state_from_parts! expect(build.state).to eq('running') end it "should set build_state to doomed when an attempt is retried but other attempts are failed" do ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'failed') ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'failed') ba2_2 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running') build.update_state_from_parts! expect(build.state).to eq('doomed') end it "should ignore the old build_attempts" do ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed') ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'errored') ba2_2 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'passed') build.update_state_from_parts! expect(build.state).to eq('succeeded') end it "should not ignore old build_attempts that passed" do ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed') ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'passed') ba2_2 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'errored') build.update_state_from_parts! 
expect(build.state).to eq('succeeded') end context "when the build is aborted" do let(:build) { FactoryBot.create(:build, branch_record: branch, state: 'aborted') } it "should set state to succeeded if a build is aborted, but all of its parts passed" do # scenario is applicable if a build is aborted only after its build parts are already running FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed') FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'passed') build.update_state_from_parts! expect(build.state).to eq('succeeded') end it "should remain aborted when build attempts finish as errored or failed" do FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed') ba = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'errored') build.update_state_from_parts! expect(build.state).to eq('aborted') ba.update_attributes!(state: 'failed') build.update_state_from_parts! expect(build.state).to eq('aborted') end end end describe "#elapsed_time" do it "returns the difference between the build creation time and the last finished time" do build.partition(parts) expect(build.elapsed_time).to be_nil last_attempt = BuildAttempt.find(build.build_attempts.last.id) last_attempt.update_attributes(:finished_at => build.created_at + 10.minutes) expect(build.elapsed_time).to be_within(1.second).of(10.minutes) end end describe "#abort!" do let(:build) { FactoryBot.create(:build, :state => 'runnable', :merge_on_success => true) } it "should mark the build as aborted" do expect{ build.abort! }.to change(build, :state).from('runnable').to('aborted') end it "should strip a true merge_on_success setting" do expect{ build.abort! 
}.to change(build, :merge_on_success).to(false) end it "should mark all of the build's unstarted build_attempts as aborted" do build_part1 = FactoryBot.create(:build_part, :build_instance => build) build_part2 = FactoryBot.create(:build_part, :build_instance => build) build_attempt_started = FactoryBot.create(:build_attempt, :build_part => build_part1, :state => 'running') build_attempt_unstarted = FactoryBot.create(:build_attempt, :build_part => build_part2, :state => 'runnable') build.abort! expect(build_attempt_started.reload.state).to eq('running') expect(build_attempt_unstarted.reload.state).to eq('aborted') end end describe '#to_png' do let(:build) { FactoryBot.create(:build, :state => state) } let(:png) { build.to_png } let(:png_color) { png.get_pixel(png.width / 2, png.height / 2) } let(:red) { 4151209727 } let(:green) { 3019337471 } let(:blue) { 1856370687 } context 'with succeeded state' do let(:state) { 'succeeded' } it 'returns a green status png' do expect(png_color).to eq(green) end end %w(failed errored aborted doomed).each do |current_state| context "with #{current_state} state" do let(:state) { current_state } it 'returns a red status png' do expect(png_color).to eq(red) end end end %w(partitioning runnable running).each do |current_state| context "with #{current_state} state" do let(:state) { current_state } it 'returns a blue status png' do expect(png_color).to eq(blue) end end end end describe "#previous_successful_build" do let(:successful_build) { build.partition(parts) build.build_parts.each { |part| part.last_attempt.finish!('passed') } build.update_state_from_parts! 
build.update_attribute(:updated_at, 1.minute.ago) build } it "returns nil when there are no previous successful builds for the branch" do expect(build.succeeded?).to be false build2 = FactoryBot.create(:build, branch_record: branch) expect(build.previous_successful_build).to be_nil expect(build2.previous_successful_build).to be_nil end it "returns the most recent build in state == 'succeeded' prior to this build" do stub_request(:post, /https:\/\/git\.squareup\.com\/api\/v3\/repos\/square\/kochiku\/statuses\//) expect(successful_build.succeeded?).to be true build2 = FactoryBot.create(:build, branch_record: branch) expect(build2.previous_successful_build).to eq(successful_build) end end describe "#mergable_by_kochiku??" do let(:build) { FactoryBot.create(:build, branch_record: branch) } before do expect(build.branch_record).to_not be_convergence expect(build.repository.allows_kochiku_merges).to be true end context "when merge_on_success_enabled? is true" do before do build.update_attributes!(merge_on_success: true) expect(build.merge_on_success_enabled?).to be true end it "is true if it is a passed build" do build.state = 'succeeded' expect(build.mergable_by_kochiku?).to be true end it "is false if it is a failed build" do (Build::TERMINAL_STATES - ['succeeded']).each do |failed_state| build.state = failed_state expect(build.mergable_by_kochiku?).to be false end end end context "with merge_on_success disabled" do it "should never be true" do build.merge_on_success = false build.state = 'succeeded' expect(build.mergable_by_kochiku?).to be false end end context "when allows_kochiku_merges has been disabled on the repository" do before do build.repository.update_attributes(:allows_kochiku_merges => false) end it "should never be true" do build.merge_on_success = true build.state = 'succeeded' expect(build.mergable_by_kochiku?).to be false end end context 'there is a newer build for the same branch' do let(:build) { FactoryBot.create(:build, branch_record: branch, 
state: 'succeeded', merge_on_success: true) } before do expect(build.mergable_by_kochiku?).to be true end it 'should no longer be mergable' do expect(build).to receive(:newer_branch_build_exists?).and_return(true) expect(build.mergable_by_kochiku?).to be false end end end describe "#merge_on_success_enabled?" do it "is true if it is a developer build with merge_on_success enabled" do build.merge_on_success = true expect(build.merge_on_success_enabled?).to be true end it "is false if it is a developer build with merge_on_success disabled" do build.merge_on_success = false expect(build.merge_on_success_enabled?).to be false end context "for a build on a convergence branch" do let(:build) { FactoryBot.create(:convergence_branch_build) } it "should be false" do build.merge_on_success = true expect(build.merge_on_success_enabled?).to be false end end end describe "#newer_branch_build_exists?" do before do @build1 = FactoryBot.create(:build, branch_record: branch) @build2 = FactoryBot.create(:build, branch_record: branch) end it "should be true for the earlier build" do expect(@build1.newer_branch_build_exists?).to be true end it "should be false for the later build" do expect(@build2.newer_branch_build_exists?).to be false end end describe "#already_failed?" do let!(:build_part_1) { FactoryBot.create(:build_part, :build_instance => build, :retry_count => 3) } it "returns false when there exists successful build attempt" do ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'failed') ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed') expect(build.already_failed?).to eq(false) end it "returns true when there exists no successful build attempt" do ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'failed') ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'running') expect(build.already_failed?).to eq(true) end end describe "#send_build_status_email!" 
do let(:repository) { FactoryBot.create(:repository) } let(:branch) { FactoryBot.create(:branch, repository: repository) } let(:build) { FactoryBot.create(:build, state: 'runnable', branch_record: branch) } let(:build_attempt) { build.build_parts.first.build_attempts.create!(state: 'failed') } it "should not send a failure email if the branch has never had a successful build" do expect(BuildMailer).not_to receive(:build_break_email) build.send_build_status_email! end context "for a branch that has had a successful build" do let(:build) { FactoryBot.create(:build, state: 'succeeded', branch_record: branch) FactoryBot.create(:build, state: 'runnable', branch_record: branch) } it "should not send the email if the build is not completed" do expect(BuildMailer).not_to receive(:build_break_email) build.send_build_status_email! end it "should not send the failure email if the build passed" do build.update_attribute(:state, 'succeeded') expect(BuildMailer).not_to receive(:build_break_email) expect(BuildMailer).to receive(:build_success_email).and_return(OpenStruct.new(:deliver => nil)) build.send_build_status_email! end it "should only send the build failure email once" do build.update_attribute(:state, 'failed') expect(BuildMailer).to receive(:build_break_email).once.and_return(OpenStruct.new(:deliver => nil)) build.send_build_status_email! build.send_build_status_email! end it "should send a fail email when the build is finished" do build.update_attribute(:state, 'failed') expect(BuildMailer).to receive(:build_break_email).and_return(OpenStruct.new(:deliver => nil)) build.send_build_status_email! end it "does not send a email if the repository setting is disabled" do build.update_attribute(:state, 'failed') repository.update_attributes!(:send_build_failure_email => false) build.reload expect(BuildMailer).not_to receive(:build_break_email) build.send_build_status_email! 
end context "when email_on_first_failure is false" do before do repository.update_attribute(:email_on_first_failure, false) end it "should not send email on first build part failure" do build.update_attribute(:state, 'doomed') expect(BuildMailer).to_not receive(:build_break_email) build.send_build_status_email! end context "retries enabled" do let!(:build_part_1) { FactoryBot.create(:build_part, :build_instance => build, :retry_count => 3) } let!(:build_part_2) { FactoryBot.create(:build_part, :build_instance => build, :retry_count => 3) } it "should not send email before retry" do ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'running') ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running') expect(BuildMailer).to_not receive(:build_break_email) ba2_1.finish!('failed') end end end context "when email_on_first_failure is true" do before do repository.update_attribute(:email_on_first_failure, true) end context "on a convergence branch build" do let(:branch) { FactoryBot.create(:convergence_branch, repository: repository) } let!(:build_part_1) { FactoryBot.create(:build_part, :build_instance => build, :retry_count => 3) } let!(:build_part_2) { FactoryBot.create(:build_part, :build_instance => build, :retry_count => 3) } it "should not send email prior to retry" do ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed') ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running') expect(BuildMailer).to_not receive(:build_break_email) ba2_1.finish!('failed') end end context "branch build" do let(:branch) { FactoryBot.create(:branch, repository: repository) } let(:branch_build) { FactoryBot.create(:build, :state => 'runnable', :branch_record => branch) } let!(:build_part_1) { FactoryBot.create(:build_part, :build_instance => branch_build, :retry_count => 3) } let!(:build_part_2) { FactoryBot.create(:build_part, :build_instance => branch_build, :retry_count => 3) 
} it "should send email prior to retry" do ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed') ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running') expect(BuildMailer).to receive(:build_break_email).once.and_return(OpenStruct.new(:deliver => nil)) ba2_1.finish!('failed') end end end context "for a build not on a convergence branch" do before do expect(branch).to_not be_convergence end it "should not send a failure email" do expect(BuildMailer).not_to receive(:build_break_email) build.send_build_status_email! end it "should send a success email" do build.update_attribute(:state, 'succeeded') expect(BuildMailer).to receive(:build_success_email).and_return(OpenStruct.new(:deliver => nil)) build.send_build_status_email! end end end end describe '#as_json' do it 'returns a hash with elapsed_time' do build.partition(parts) hash = build.as_json expect(hash['build'].key?('elapsed_time')).to eq(true) expect(hash['build']['elapsed_time']).to eq(build.elapsed_time) last_attempt = BuildAttempt.find(build.build_attempts.last.id) last_attempt.update_attributes(:finished_at => build.created_at + 10.minutes) hash = build.as_json expect(hash['build'].key?('elapsed_time')).to eq(true) expect(hash['build']['elapsed_time']).to eq(build.elapsed_time) end it 'returns a hash with out test_command' do build.partition(parts) hash = build.as_json expect(hash['build'].key?('test_command')).to eq(false) end it 'returns elapsed_time even when other options are used' do build.partition(parts) hash = build.as_json(include: :build_parts) expect(hash['build'].key?('elapsed_time')).to eq(true) end it 'allows overriding :methods option' do build.partition(parts) hash = build.as_json(methods: :idle_time) expect(hash['build'].key?('elapsed_time')).to eq(false) expect(hash['build'].key?('idle_time')).to eq(true) end end end ================================================ FILE: spec/models/repository_observer_spec.rb 
================================================
require 'spec_helper'

describe RepositoryObserver do
  subject { RepositoryObserver.instance }

  let(:repository) { FactoryBot.create(:repository, :url => "git@git.example.com:square/web.git") }

  before do
    # Minimal git-server config so git.example.com is recognized as a GitHub host.
    settings = SettingsAccessor.new(<<-YAML)
      git_servers:
        git.example.com:
          type: github
    YAML
    stub_const "Settings", settings
    # Bypass the guard that normally prevents contacting GitHub from tests,
    # so the observer issues the (stubbed) API calls below.
    allow(subject).to receive(:should_contact_github?).and_return(true)
  end

  it "creates the hook if enabled" do
    # Lookup of existing hooks: must carry the OAuth token; returns none.
    stub_request(:get, "#{repository.base_api_url}/hooks").with do |request|
      expect(request.headers["Authorization"]).to eq("token #{GithubRequest::OAUTH_TOKEN}")
      true
    end.to_return(:body => '[]')
    # Hook creation: verify the posted JSON describes an active webhook
    # subscribed to pull_request events.
    stub_request(:post, "#{repository.base_api_url}/hooks").with do |request|
      expect(request.headers["Authorization"]).to eq("token #{GithubRequest::OAUTH_TOKEN}")
      body = JSON.parse(request.body)
      expect(body["name"]).to eq("web")
      expect(body["events"]).to eq(['pull_request'])
      expect(body["active"]).to eq(true)
      true
    end.to_return(:body => '[]')
    repository.build_pull_requests = true
    subject.after_save(repository)
  end
end
================================================
FILE: spec/models/repository_spec.rb
================================================
require 'spec_helper'

describe Repository do
  before do
    # Fixed git-server settings, including a host alias that the
    # lookup_by_url examples below rely on.
    settings = SettingsAccessor.new(<<-YAML)
      git_servers:
        stash.example.com:
          type: stash
        git.example.com:
          type: github
          aliases:
            - git-alias.example.com
        github.com:
          type: github
    YAML
    stub_const "Settings", settings
  end

  describe '.lookup_by_url' do
    it 'should return the Repository (straightforward)' do
      repo = FactoryBot.create(:repository)
      expect(Repository.lookup_by_url(repo.url)).to eq(repo)
    end

    it 'should return the Repository when a host alias is used during creation' do
      repo = FactoryBot.create(:repository, url: "git@git-alias.example.com:square/some-repo.git")
      expect(Repository.lookup_by_url("git@git.example.com:square/some-repo.git")).to eq(repo)
    end

    it 'should return the Repository when a host alias is used
during lookup' do repo = FactoryBot.create(:repository, url: "git@git.example.com:square/some-repo.git") expect(Repository.lookup_by_url("git@git-alias.example.com:square/some-repo.git")).to eq(repo) end it 'should return nil if lookup fails' do expect( Repository.lookup_by_url("git@git-alias.example.com:square/some-repo.git") ).to be_nil end end describe 'creation' do it 'should extract attributes from the url' do repo = Repository.new(url: "git://git.example.com/who/what.git") expect(repo.host).to eq('git.example.com') expect(repo.namespace).to eq('who') expect(repo.name).to eq('what') end it 'should not allow url to trump explicit values' do repo = Repository.new(name: 'explicit_name', namespace: 'explicit_namespace', host: 'git-alias.example.com') repo.url = "git://git.example.com/who/what.git" expect(repo.name).to eq('explicit_name') expect(repo.namespace).to eq('explicit_namespace') expect(repo.host).to eq('git-alias.example.com') end end describe 'validations' do context 'for url' do it "should add a error on url, if url is an an unsupported format" do repo = Repository.new(url: "file://data/git/fun-proj.git") expect(repo).to have(1).error_on(:url) expect(repo.errors_on(:url)).to include("is not in a format supported by Kochiku") end it "should add an error on url on unknown git server" do repo = Repository.new(url: "git@example.com:who/what.git") expect(repo).to have(1).error_on(:url) expect(repo.errors_on(:url)).to include("host is not in Kochiku's list of git servers") end end context "when name" do context "is set" do it "leaves it as is" do repo = Repository.new(url: "git://git.example.com/square/kochiku-name.git", name: "another_repo") repo.valid? expect(repo.name).to eq("another_repo") end end context "is not set when saving" do it "sets the name based on the repository url" do repo = Repository.new(url: "git://git.example.com/square/kochiku-name.git") repo.valid? 
expect(repo.name).to eq("kochiku-name") end end end context "name" do before do @repo1 = FactoryBot.create(:repository, url: "git@git.example.com:kansas/kansas-city.git") end it "should allow two repositories with the same name from different namespaces" do repo2 = Repository.new(url: "git://git.example.com/missouri/kansas-city.git") expect(repo2).to be_valid end it "should not allow two repositories with the same name and namespaces" do repo2 = Repository.new(url: "git://github.com/kansas/kansas-city.git") repo2.valid? expect(repo2).to have(1).error_on(:name) expect(repo2.errors.full_messages).to include("Namespace + Name combination already exists") end end end context "#interested_github_events" do it 'includes push if run_ci is enabled' do expect(Repository.new(:run_ci => true).interested_github_events).to eq(['pull_request', 'push']) end it 'does not include push if run_ci is enabled' do expect(Repository.new(:run_ci => false).interested_github_events).to eq(['pull_request']) end end context "#promotion_refs" do it "is an empty array when promotion_refs is a empty string" do expect(Repository.new(:on_green_update => "").promotion_refs).to eq([]) end it "is an empty array when promotion_refs is a blank string" do expect(Repository.new(:on_green_update => " ").promotion_refs).to eq([]) end it "is an empty array when promotion_refs is comma" do expect(Repository.new(:on_green_update => " , ").promotion_refs).to eq([]) end it "splits on comma's" do expect(Repository.new(:on_green_update => "a,b,c").promotion_refs).to eq(%w( a b c )) end end context "#base_api_url" do it "handles ssh urls" do repo = Repository.new(url: "git@git.example.com:square/kochiku.git") expect(repo.base_api_url).to eq("https://git.example.com/api/v3/repos/square/kochiku") end end context "#base_html_url" do it "handles ssh urls" do repo = Repository.new(url: "git@git.example.com:square/kochiku.git") expect(repo.base_html_url).to eq("https://git.example.com/square/kochiku") end it "handles 
http urls" do repo = Repository.new(url: "http://git.example.com/square/kochiku.git") expect(repo.base_html_url).to eq("https://git.example.com/square/kochiku") end it "handles https urls" do repo = Repository.new(url: "https://git.example.com/square/kochiku.git") expect(repo.base_html_url).to eq("https://git.example.com/square/kochiku") end it "handles git read only urls" do repo = Repository.new(url: "git://git.example.com/square/kochiku.git") expect(repo.base_html_url).to eq("https://git.example.com/square/kochiku") end end context "#run_ci=" do it "converts the checkbox to bool" do repository = FactoryBot.create(:repository) repository.run_ci = "1" repository.save repository.reload expect(repository.run_ci).to eq(true) repository.run_ci = "0" repository.save repository.reload expect(repository.run_ci).to eq(false) end end context "#build_pull_requests=" do it "converts the checkbox to bool" do repository = FactoryBot.create(:repository) repository.build_pull_requests = "1" repository.save repository.reload expect(repository.build_pull_requests).to eq(true) repository.build_pull_requests = "0" repository.save repository.reload expect(repository.build_pull_requests).to eq(false) end end it "saves build tags" do repository = FactoryBot.create(:repository) repository.on_green_update = "1,2,3" repository.save repository.reload expect(repository.on_green_update).to eq("1,2,3") end describe '#build_for_commit' do let!(:repositoryA) { FactoryBot.create(:repository) } let!(:repositoryB) { FactoryBot.create(:repository) } let!(:branchA1) { FactoryBot.create(:branch, repository: repositoryA) } let!(:branchB1) { FactoryBot.create(:branch, repository: repositoryB) } let(:sha) { to_40('a') } it "should return the build associated with the repository" do buildA1 = FactoryBot.create(:build, branch_record: branchA1, ref: sha) expect(repositoryA.build_for_commit(sha)).to eq(buildA1) expect(repositoryB.build_for_commit(sha)).to be_nil buildB1 = FactoryBot.create(:build, 
branch_record: branchB1, ref: sha) expect(repositoryA.build_for_commit(sha)).to eq(buildA1) expect(repositoryB.build_for_commit(sha)).to eq(buildB1) end end describe '#ensure_build_exists' do let(:repository) { FactoryBot.create(:repository) } let(:branch) { FactoryBot.create(:branch, repository: repository) } it 'creates a new build only if one does not exist' do sha = to_40('abcdef') build1 = repository.ensure_build_exists(sha, branch) build2 = repository.ensure_build_exists(sha, branch) expect(build1).not_to eq(nil) expect(build1).to eq(build2) expect(build1.branch_record).to eq(branch) expect(build1.ref).to eq(sha) expect(build1.state).to eq('partitioning') end end end ================================================ FILE: spec/routes_spec.rb ================================================ require 'spec_helper' RSpec.describe "routes", :type => :routing do describe '/badge/org_name/repo_name' do specify { expect(get: '/badge/org_name/repo_name?branch=moonwalker').to route_to( controller: "branches", action: "badge", repository_path: "org_name/repo_name", branch: "moonwalker" ) } end context "branches at" do describe '/:repository_path/:id' do it 'to branch show page' do expect(:get => "/org_name/repo_name/bug-fix").to route_to( :controller => "branches", :action => "show", :repository_path => "org_name/repo_name", :id => "bug-fix" ) end it 'to support branch names with slashes' do expect(:get => "/org_name/repo_name/rob/bug-fix").to route_to( :controller => "branches", :action => "show", :repository_path => "org_name/repo_name", :id => "rob/bug-fix" ) end it 'to support branch names with dots' do expect(:get => "/org_name/repo_name/rob.bug-fix").to route_to( :controller => "branches", :action => "show", :repository_path => "org_name/repo_name", :id => "rob.bug-fix" ) end end describe '/:repository_path/:id member routes' do it 'to a sub page' do expect(:get => "/org_name/repo_name/bug-fix/health").to route_to( :controller => "branches", :action => "health", 
:repository_path => "org_name/repo_name", :id => "bug-fix" ) end it 'to support branch names with slashes' do expect(:get => "/org_name/repo_name/rob/bug-fix/health").to route_to( :controller => "branches", :action => "health", :repository_path => "org_name/repo_name", :id => "rob/bug-fix" ) end it 'to support branch names with dots' do expect(:get => "/org_name/repo_name/rob.bug-fix/health").to route_to( :controller => "branches", :action => "health", :repository_path => "org_name/repo_name", :id => "rob.bug-fix" ) end end end end ================================================ FILE: spec/spec_helper.rb ================================================ # This file is copied to spec/ when you run 'rails generate rspec:install' ENV["RAILS_ENV"] ||= 'test' require File.expand_path("../../config/environment", __FILE__) require 'rspec/rails' require 'rspec/collection_matchers' require 'webmock/rspec' require 'nokogiri' require 'factory_bot' require 'capybara/rspec' require 'git_blame' include ActionDispatch::TestProcess FIXTURE_PATH = Rails.root.join('spec', 'fixtures') # Requires supporting ruby files with custom matchers and macros, etc, # in spec/support/ and its subdirectories. Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f } # Checks for pending migrations before tests are run. ActiveRecord::Migration.maintain_test_schema! # Test decorators independent of ActionController # https://github.com/drapergem/draper#isolated-tests Draper::ViewContext.test_strategy :fast RSpec.configure do |config| config.expect_with :rspec do |c| c.syntax = :expect end config.mock_with :rspec do |mocks| # Cause any verifying double instantiation for a class that does not # exist to raise, protecting against incorrectly spelt names. 
mocks.verify_doubled_constant_names = true end config.fixture_path = FIXTURE_PATH # If you're not using ActiveRecord, or you'd prefer not to run each of your # examples within a transaction, remove the following line or assign false # instead of true. config.use_transactional_fixtures = true # Define which fixtures should be globally available. Set to :all to load everything # config.global_fixtures = :all # RSpec Rails can automatically mix in different behaviours to your tests # based on their file location, for example enabling you to call `get` and # `post` in specs under `spec/controllers`. config.infer_spec_type_from_file_location! # lil speed increase because we are not spawning threads in our tests config.threadsafe = false config.example_status_persistence_file_path = "./spec/examples.txt" config.before :each do WebMock.disable_net_connect! allow(JobBase).to receive(:enqueue_in) allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return("") allow(GitBlame).to receive(:git_names_and_emails_in_branch).and_return("") allow(GitBlame).to receive(:changes_since_last_green).and_return([]) allow(GitBlame).to receive(:changes_in_branch).and_return([]) allow(GitBlame).to receive(:files_changed_since_last_build).and_return([]) allow(GitBlame).to receive(:files_changed_since_last_green).and_return([]) allow(GitBlame).to receive(:files_changed_in_branch).and_return([]) ActionMailer::Base.deliveries.clear end end ================================================ FILE: spec/support/command_stubber.rb ================================================ class CommandStubber include RSpec::Mocks::ExampleMethods attr_accessor :executed_commands, :fake_command_output def initialize @executed_commands = [] @fake_command_output = "fake command output" # Always stub to prevent executing git commands. 
stub_capture2e end def create_stubbed_process_status(exitstatus = 0) double( exitstatus: exitstatus, success?: exitstatus == 0 ) end def stub_capture2e_failure(fail_on_cmd) allow(Open3).to receive(:capture2e) do |*cmd| # cmd is an Array in the format: [{'env' => 'variable'}, 'echo baz'] # where the hash with environment variables is optional @executed_commands << cmd exitstatus = if fail_on_cmd && cmd.any? { |a| a.is_a?(String) && a.start_with?(fail_on_cmd) } 1 else 0 end [@fake_command_output, create_stubbed_process_status(exitstatus)] end end def stub_capture2e stub_capture2e_failure(nil) end def check_cmd_executed(expected_cmd) found = @executed_commands.any? do |commands| commands.any? { |cmd| cmd =~ /^#{expected_cmd}.*/ } end raise Exception, "Failed to find #{expected_cmd} in executed commands" unless found end end ================================================ FILE: spec/support/custom_argument_matchers.rb ================================================ RSpec::Matchers.define :a_string do |x| match { |actual| actual.instance_of?(String) } end ================================================ FILE: spec/support/factories.rb ================================================ FactoryBot.define do factory :branch do sequence(:name) { |n| "branch_#{n}" } association :repository factory :convergence_branch do name "1-x-stable" convergence true end factory :master_branch do name "master" convergence true end factory :branch_on_disabled_repo do association :repository, factory: :disabled_repository end end factory :build do state 'partitioning' ref { SecureRandom.hex(20) } # 20 is the length in bytes, resulting string is twice n association :branch_record, factory: :branch factory :convergence_branch_build do association :branch_record, :factory => :convergence_branch end factory :completed_build do state ['failed', 'succeeded'].sample # specify num_build_parts on the factory to create a build with more than 1 build_part transient do num_build_parts 1 end 
after(:create) do |build_instance, evaluator| create_list(:build_part_with_build_attempt, evaluator.num_build_parts, build_instance: build_instance) end end factory :build_on_disabled_repo do association :branch_record, factory: :branch_on_disabled_repo end end factory :build_part do association :build_instance, :factory => :build, :state => 'runnable' kind :test paths ["/foo/1.test", "foo/baz/a.test", "foo/baz/b.test"] queue 'ci' factory :build_part_with_build_attempt do after(:create) do |build_part, evaluator| create_list(:completed_build_attempt, 1, build_part: build_part) end end end factory :build_attempt do build_part state 'runnable' factory :completed_build_attempt do state { build_part.build_instance.state == 'succeeded' ? 'passed' : 'failed' } finished_at { Time.current } end end factory :build_artifact do association :build_attempt, :state => 'failed' log_file File.open(FIXTURE_PATH + "build_artifact.log") factory :stdout_build_artifact do log_file File.open(FIXTURE_PATH + "stdout.log") end end factory :repository do sequence(:url) { |n| "git@github.com:org_name/test-repo#{n}.git" } # these repos do not exist on purpose test_command "script/ci worker" on_green_update 'last-green-build' allows_kochiku_merges true enabled true factory :stash_repository do sequence(:url) { |n| "git@stash.example.com:bucket_name/test-repo#{n}.git" } end factory :disabled_repository do enabled false end end end ================================================ FILE: spec/support/git_spec_helper.rb ================================================ # template=/dev/null to ignore any global templatedir the developer may have # configured on their machine. Pipe to /dev/null to ignore the warning about # no template dir found. 
def suppressed_git_init `git init --template=/dev/null 2> /dev/null` end ================================================ FILE: spec/support/sha_helper.rb ================================================ def to_40(short) multiplier = (40.0 / short.length).ceil (short * multiplier).slice(0, 40) end ================================================ FILE: vendor/assets/javascripts/jquery.flot.categories.js ================================================ /* Flot plugin for plotting textual data or categories. Copyright (c) 2007-2013 IOLA and Ole Laursen. Licensed under the MIT license. Consider a dataset like [["February", 34], ["March", 20], ...]. This plugin allows you to plot such a dataset directly. To enable it, you must specify mode: "categories" on the axis with the textual labels, e.g. $.plot("#placeholder", data, { xaxis: { mode: "categories" } }); By default, the labels are ordered as they are met in the data series. If you need a different ordering, you can specify "categories" on the axis options and list the categories there: xaxis: { mode: "categories", categories: ["February", "March", "April"] } If you need to customize the distances between the categories, you can specify "categories" as an object mapping labels to values xaxis: { mode: "categories", categories: { "February": 1, "March": 3, "April": 4 } } If you don't specify all categories, the remaining categories will be numbered from the max value plus 1 (with a spacing of 1 between each). Internally, the plugin works by transforming the input data through an auto- generated mapping where the first category becomes 0, the second 1, etc. Hence, a point like ["February", 34] becomes [0, 34] internally in Flot (this is visible in hover and click events that return numbers rather than the category labels). The plugin also overrides the tick generator to spit out the categories as ticks instead of the values. 
If you need to map a value back to its label, the mapping is always accessible
as "categories" on the axis object, e.g. plot.getAxes().xaxis.categories.

*/

(function ($) {
    var options = {
        xaxis: {
            categories: null
        },
        yaxis: {
            categories: null
        }
    };

    // Raw-data hook: when an axis is in "categories" mode, mark its
    // coordinate as non-numeric in the datapoints format so Flot keeps the
    // label strings intact for the later mapping pass.
    function processRawData(plot, series, data, datapoints) {
        // if categories are enabled, we need to disable
        // auto-transformation to numbers so the strings are intact
        // for later processing
        var xCategories = series.xaxis.options.mode == "categories",
            yCategories = series.yaxis.options.mode == "categories";

        if (!(xCategories || yCategories))
            return;

        var format = datapoints.format;

        if (!format) {
            // FIXME: auto-detection should really not be defined here
            var s = series;
            format = [];
            format.push({ x: true, number: true, required: true });
            format.push({ y: true, number: true, required: true });

            if (s.bars.show || (s.lines.show && s.lines.fill)) {
                var autoscale = !!((s.bars.show && s.bars.zero) || (s.lines.show && s.lines.zero));
                format.push({ y: true, number: true, required: false, defaultValue: 0, autoscale: autoscale });
                if (s.bars.horizontal) {
                    // horizontal bars: the extra column belongs to x, not y
                    delete format[format.length - 1].y;
                    format[format.length - 1].x = true;
                }
            }

            datapoints.format = format;
        }

        // disable numeric coercion for every column on a categorized axis
        for (var m = 0; m < format.length; ++m) {
            if (format[m].x && xCategories)
                format[m].number = false;
            if (format[m].y && yCategories)
                format[m].number = false;
        }
    }

    // Next unused numeric index: one past the largest value already present
    // in the label->number mapping (or 0 for an empty mapping).
    function getNextIndex(categories) {
        var index = -1;

        for (var v in categories)
            if (categories[v] > index)
                index = categories[v];

        return index + 1;
    }

    // Tick generator installed on categorized axes: one [value, label] tick
    // per category whose mapped value lies inside the axis range, sorted by
    // mapped value.
    function categoriesTickGenerator(axis) {
        var res = [];
        for (var label in axis.categories) {
            var v = axis.categories[label];
            if (v >= axis.min && v <= axis.max)
                res.push([v, label]);
        }

        res.sort(function (a, b) { return a[0] - b[0]; });

        return res;
    }

    // Build (or reuse) the label->number mapping for one axis ("xaxis" or
    // "yaxis"), install the category tick generator if no explicit ticks are
    // configured, and rewrite the series points through the mapping.
    function setupCategoriesForAxis(series, axis, datapoints) {
        if (series[axis].options.mode != "categories")
            return;

        if (!series[axis].categories) {
            // parse options: an array assigns indices in order, an object is
            // taken as an explicit label->value mapping
            var c = {}, o = series[axis].options.categories || {};
            if ($.isArray(o)) {
                for (var i = 0; i < o.length; ++i)
                    c[o[i]] = i;
            }
            else {
                for (var v in o)
                    c[v] = o[v];
            }

            series[axis].categories = c;
        }

        // fix ticks
        if (!series[axis].options.ticks)
            series[axis].options.ticks = categoriesTickGenerator;

        transformPointsOnAxis(datapoints, axis, series[axis].categories);
    }

    // Replace label strings with their mapped numbers in-place, assigning
    // fresh indices (via getNextIndex) to labels not yet in the mapping.
    function transformPointsOnAxis(datapoints, axis, categories) {
        // go through the points, transforming them
        var points = datapoints.points,
            ps = datapoints.pointsize,
            format = datapoints.format,
            formatColumn = axis.charAt(0), // 'x' or 'y'
            index = getNextIndex(categories);

        for (var i = 0; i < points.length; i += ps) {
            if (points[i] == null)
                continue;

            for (var m = 0; m < ps; ++m) {
                var val = points[i + m];

                // only touch columns belonging to this axis
                if (val == null || !format[m][formatColumn])
                    continue;

                if (!(val in categories)) {
                    categories[val] = index;
                    ++index;
                }

                points[i + m] = categories[val];
            }
        }
    }

    // Datapoints hook: apply the category transform to both axes.
    function processDatapoints(plot, series, datapoints) {
        setupCategoriesForAxis(series, "xaxis", datapoints);
        setupCategoriesForAxis(series, "yaxis", datapoints);
    }

    function init(plot) {
        plot.hooks.processRawData.push(processRawData);
        plot.hooks.processDatapoints.push(processDatapoints);
    }

    $.plot.plugins.push({
        init: init,
        options: options,
        name: 'categories',
        version: '1.0'
    });
})(jQuery);
================================================
FILE: vendor/assets/javascripts/jquery.flot.errorbars.js
================================================
/* Flot plugin for plotting error bars.

Copyright (c) 2007-2013 IOLA and Ole Laursen.
Licensed under the MIT license.

Error bars are used to show standard deviation and other statistical
properties in a plot.

* Created by Rui Pereira - rui (dot) pereira (at) gmail (dot) com

This plugin allows you to plot error-bars over points. Set "errorbars" inside
the points series to the axis name over which there will be error values in
your data array (*even* if you do not intend to plot them later, by setting
"show: null" on xerr/yerr).
The plugin supports these options: series: { points: { errorbars: "x" or "y" or "xy", xerr: { show: null/false or true, asymmetric: null/false or true, upperCap: null or "-" or function, lowerCap: null or "-" or function, color: null or color, radius: null or number }, yerr: { same options as xerr } } } Each data point array is expected to be of the type: "x" [ x, y, xerr ] "y" [ x, y, yerr ] "xy" [ x, y, xerr, yerr ] Where xerr becomes xerr_lower,xerr_upper for the asymmetric error case, and equivalently for yerr. Eg., a datapoint for the "xy" case with symmetric error-bars on X and asymmetric on Y would be: [ x, y, xerr, yerr_lower, yerr_upper ] By default no end caps are drawn. Setting upperCap and/or lowerCap to "-" will draw a small cap perpendicular to the error bar. They can also be set to a user-defined drawing function, with (ctx, x, y, radius) as parameters, as eg. function drawSemiCircle( ctx, x, y, radius ) { ctx.beginPath(); ctx.arc( x, y, radius, 0, Math.PI, false ); ctx.moveTo( x - radius, y ); ctx.lineTo( x + radius, y ); ctx.stroke(); } Color and radius both default to the same ones of the points series if not set. The independent radius parameter on xerr/yerr is useful for the case when we may want to add error-bars to a line, without showing the interconnecting points (with radius: 0), and still showing end caps on the error-bars. shadowSize and lineWidth are derived as well from the points series. 
*/

(function ($) {
    // Default options merged into every plot. Each of xerr/yerr carries its
    // own axis tag ('x'/'y') so the drawing code can tell them apart.
    var options = {
        series: {
            points: {
                errorbars: null, //should be 'x', 'y' or 'xy'
                xerr: { err: 'x', show: null, asymmetric: null, upperCap: null, lowerCap: null, color: null, radius: null},
                yerr: { err: 'y', show: null, asymmetric: null, upperCap: null, lowerCap: null, color: null, radius: null}
            }
        }
    };

    // processRawData hook: widen the datapoint format so Flot copies the
    // extra error columns (one per symmetric error, two per asymmetric one)
    // into series.datapoints alongside x and y.
    function processRawData(plot, series, data, datapoints){
        if (!series.points.errorbars)
            return;

        // x,y values
        var format = [
            { x: true, number: true, required: true },
            { y: true, number: true, required: true }
        ];

        var errors = series.points.errorbars;
        // error bars - first X then Y
        if (errors == 'x' || errors == 'xy') {
            // lower / upper error
            if (series.points.xerr.asymmetric) {
                format.push({ x: true, number: true, required: true });
                format.push({ x: true, number: true, required: true });
            } else
                format.push({ x: true, number: true, required: true });
        }
        if (errors == 'y' || errors == 'xy') {
            // lower / upper error
            if (series.points.yerr.asymmetric) {
                format.push({ y: true, number: true, required: true });
                format.push({ y: true, number: true, required: true });
            } else
                format.push({ y: true, number: true, required: true });
        }
        datapoints.format = format;
    }

    // Extracts the error values for the point starting at flat-array offset i.
    // Returns [xerr_lower, xerr_upper, yerr_lower, yerr_upper], with entries
    // nulled out when that error is hidden. Column positions depend on which
    // of x/y are present and whether each is asymmetric.
    function parseErrors(series, i){

        var points = series.datapoints.points;

        // read errors from points array
        var exl = null,
            exu = null,
            eyl = null,
            eyu = null;
        var xerr = series.points.xerr,
            yerr = series.points.yerr;

        var eb = series.points.errorbars;
        // error bars - first X
        if (eb == 'x' || eb == 'xy') {
            if (xerr.asymmetric) {
                exl = points[i + 2];
                exu = points[i + 3];
                if (eb == 'xy')
                    if (yerr.asymmetric){
                        eyl = points[i + 4];
                        eyu = points[i + 5];
                    } else eyl = points[i + 4];
            } else {
                exl = points[i + 2];
                if (eb == 'xy')
                    if (yerr.asymmetric) {
                        eyl = points[i + 3];
                        eyu = points[i + 4];
                    } else eyl = points[i + 3];
            }
        // only Y
        } else if (eb == 'y')
            if (yerr.asymmetric) {
                eyl = points[i + 2];
                eyu = points[i + 3];
            } else eyl = points[i + 2];

        // symmetric errors?
        if (exu == null) exu = exl;
        if (eyu == null) eyu = eyl;

        var errRanges = [exl, exu, eyl, eyu];
        // nullify if not showing
        if (!xerr.show){
            errRanges[0] = null;
            errRanges[1] = null;
        }
        if (!yerr.show){
            errRanges[2] = null;
            errRanges[3] = null;
        }
        return errRanges;
    }

    // Draws every error bar for one series: clips bars to the axis range,
    // compensates for inverted axes, paints the two shadow passes and then
    // the bar itself. err.length is 2, so errRanges indices 0/1 are the x
    // lower/upper errors and 2/3 the y ones.
    function drawSeriesErrors(plot, ctx, s){

        var points = s.datapoints.points,
            ps = s.datapoints.pointsize,
            ax = [s.xaxis, s.yaxis],
            radius = s.points.radius,
            err = [s.points.xerr, s.points.yerr];

        //sanity check, in case some inverted axis hack is applied to flot
        var invertX = false;
        if (ax[0].p2c(ax[0].max) < ax[0].p2c(ax[0].min)) {
            invertX = true;
            var tmp = err[0].lowerCap;
            err[0].lowerCap = err[0].upperCap;
            err[0].upperCap = tmp;
        }

        // note: for y the min/max comparison is flipped relative to x because
        // canvas pixel coordinates grow downwards
        var invertY = false;
        if (ax[1].p2c(ax[1].min) < ax[1].p2c(ax[1].max)) {
            invertY = true;
            var tmp = err[1].lowerCap;
            err[1].lowerCap = err[1].upperCap;
            err[1].upperCap = tmp;
        }

        for (var i = 0; i < s.datapoints.points.length; i += ps) {

            //parse
            var errRanges = parseErrors(s, i);

            //cycle xerr & yerr
            for (var e = 0; e < err.length; e++){

                var minmax = [ax[e].min, ax[e].max];

                //draw this error?
                if (errRanges[e * err.length]){

                    //data coordinates
                    var x = points[i],
                        y = points[i + 1];

                    //errorbar ranges
                    var upper = [x, y][e] + errRanges[e * err.length + 1],
                        lower = [x, y][e] - errRanges[e * err.length];

                    //points outside of the canvas
                    if (err[e].err == 'x')
                        if (y > ax[1].max || y < ax[1].min || upper < ax[0].min || lower > ax[0].max)
                            continue;
                    if (err[e].err == 'y')
                        if (x > ax[0].max || x < ax[0].min || upper < ax[1].min || lower > ax[1].max)
                            continue;

                    // prevent errorbars getting out of the canvas
                    var drawUpper = true,
                        drawLower = true;

                    if (upper > minmax[1]) {
                        drawUpper = false;
                        upper = minmax[1];
                    }
                    if (lower < minmax[0]) {
                        drawLower = false;
                        lower = minmax[0];
                    }

                    //sanity check, in case some inverted axis hack is applied to flot
                    if ((err[e].err == 'x' && invertX) || (err[e].err == 'y' && invertY)) {
                        //swap coordinates
                        var tmp = lower;
                        lower = upper;
                        upper = tmp;
                        tmp = drawLower;
                        drawLower = drawUpper;
                        drawUpper = tmp;
                        tmp = minmax[0];
                        minmax[0] = minmax[1];
                        minmax[1] = tmp;
                    }

                    // convert to pixels
                    x = ax[0].p2c(x),
                        y = ax[1].p2c(y),
                        upper = ax[e].p2c(upper);
                    lower = ax[e].p2c(lower);
                    minmax[0] = ax[e].p2c(minmax[0]);
                    minmax[1] = ax[e].p2c(minmax[1]);

                    //same style as points by default
                    var lw = err[e].lineWidth ? err[e].lineWidth : s.points.lineWidth,
                        sw = s.points.shadowSize != null ? s.points.shadowSize : s.shadowSize;

                    //shadow as for points
                    if (lw > 0 && sw > 0) {
                        var w = sw / 2;
                        ctx.lineWidth = w;
                        ctx.strokeStyle = "rgba(0,0,0,0.1)";
                        drawError(ctx, err[e], x, y, upper, lower, drawUpper, drawLower, radius, w + w/2, minmax);

                        ctx.strokeStyle = "rgba(0,0,0,0.2)";
                        drawError(ctx, err[e], x, y, upper, lower, drawUpper, drawLower, radius, w/2, minmax);
                    }

                    ctx.strokeStyle = err[e].color? err[e].color: s.color;
                    ctx.lineWidth = lw;
                    //draw it
                    drawError(ctx, err[e], x, y, upper, lower, drawUpper, drawLower, radius, 0, minmax);
                }
            }
        }
    }

    // Strokes one error bar (already in pixel coordinates) plus its optional
    // end caps; offset shifts the whole bar for the shadow passes.
    function drawError(ctx,err,x,y,upper,lower,drawUpper,drawLower,radius,offset,minmax){

        //shadow offset
        y += offset;
        upper += offset;
        lower += offset;

        // error bar - avoid plotting over circles
        if (err.err == 'x'){
            if (upper > x + radius) drawPath(ctx, [[upper,y],[Math.max(x + radius,minmax[0]),y]]);
            else drawUpper = false;
            if (lower < x - radius) drawPath(ctx, [[Math.min(x - radius,minmax[1]),y],[lower,y]] );
            else drawLower = false;
        }
        else {
            if (upper < y - radius) drawPath(ctx, [[x,upper],[x,Math.min(y - radius,minmax[0])]] );
            else drawUpper = false;
            if (lower > y + radius) drawPath(ctx, [[x,Math.max(y + radius,minmax[1])],[x,lower]] );
            else drawLower = false;
        }

        //internal radius value in errorbar, allows to plot radius 0 points and still keep proper sized caps
        //this is a way to get errorbars on lines without visible connecting dots
        radius = err.radius != null? err.radius: radius;

        // upper cap
        if (drawUpper) {
            if (err.upperCap == '-'){
                if (err.err=='x') drawPath(ctx, [[upper,y - radius],[upper,y + radius]] );
                else drawPath(ctx, [[x - radius,upper],[x + radius,upper]] );
            } else if ($.isFunction(err.upperCap)){
                if (err.err=='x') err.upperCap(ctx, upper, y, radius);
                else err.upperCap(ctx, x, upper, radius);
            }
        }
        // lower cap
        if (drawLower) {
            if (err.lowerCap == '-'){
                if (err.err=='x') drawPath(ctx, [[lower,y - radius],[lower,y + radius]] );
                else drawPath(ctx, [[x - radius,lower],[x + radius,lower]] );
            } else if ($.isFunction(err.lowerCap)){
                if (err.err=='x') err.lowerCap(ctx, lower, y, radius);
                else err.lowerCap(ctx, x, lower, radius);
            }
        }
    }

    // Strokes a polyline through the given [x, y] pixel pairs.
    function drawPath(ctx, pts){
        ctx.beginPath();
        ctx.moveTo(pts[0][0], pts[0][1]);
        for (var p=1; p < pts.length; p++)
            ctx.lineTo(pts[p][0], pts[p][1]);
        ctx.stroke();
    }

    // draw hook: translate to the plot area and render error bars for every
    // series that has them enabled.
    function draw(plot, ctx){
        var plotOffset = plot.getPlotOffset();

        ctx.save();
        ctx.translate(plotOffset.left, plotOffset.top);
        $.each(plot.getData(), function (i, s) {
            if (s.points.errorbars && (s.points.xerr.show || s.points.yerr.show))
                drawSeriesErrors(plot, ctx, s);
        });
        ctx.restore();
    }

    function init(plot) {
        plot.hooks.processRawData.push(processRawData);
        plot.hooks.draw.push(draw);
    }

    $.plot.plugins.push({
        init: init,
        options: options,
        name: 'errorbars',
        version: '1.0'
    });
})(jQuery);



================================================
FILE: vendor/assets/javascripts/jquery.flot.js
================================================
/* Javascript plotting library for jQuery, version 0.8.0.

Copyright (c) 2007-2013 IOLA and Ole Laursen.
Licensed under the MIT license.

*/

// first an inline dependency, jquery.colorhelpers.js, we inline it here
// for convenience

/* Plugin for jQuery for working with colors.
 *
 * Version 1.1.
 *
 * Inspiration from jQuery color animation plugin by John Resig.
 *
 * Released under the MIT license by Ole Laursen, October 2009.
* * Examples: * * $.color.parse("#fff").scale('rgb', 0.25).add('a', -0.5).toString() * var c = $.color.extract($("#mydiv"), 'background-color'); * console.log(c.r, c.g, c.b, c.a); * $.color.make(100, 50, 25, 0.4).toString() // returns "rgba(100,50,25,0.4)" * * Note that .scale() and .add() return the same modified object * instead of making a new one. * * V. 1.1: Fix error handling so e.g. parsing an empty string does * produce a color rather than just crashing. */ (function(B){B.color={};B.color.make=function(F,E,C,D){var G={};G.r=F||0;G.g=E||0;G.b=C||0;G.a=D!=null?D:1;G.add=function(J,I){for(var H=0;H=1){return"rgb("+[G.r,G.g,G.b].join(",")+")"}else{return"rgba("+[G.r,G.g,G.b,G.a].join(",")+")"}};G.normalize=function(){function H(J,K,I){return KI?I:K)}G.r=H(0,parseInt(G.r),255);G.g=H(0,parseInt(G.g),255);G.b=H(0,parseInt(G.b),255);G.a=H(0,G.a,1);return G};G.clone=function(){return B.color.make(G.r,G.b,G.g,G.a)};return G.normalize()};B.color.extract=function(D,C){var E;do{E=D.css(C).toLowerCase();if(E!=""&&E!="transparent"){break}D=D.parent()}while(!B.nodeName(D.get(0),"body"));if(E=="rgba(0, 0, 0, 0)"){E="transparent"}return B.color.parse(E)};B.color.parse=function(F){var E,C=B.color.make;if(E=/rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(F)){return C(parseInt(E[1],10),parseInt(E[2],10),parseInt(E[3],10))}if(E=/rgba\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]+(?:\.[0-9]+)?)\s*\)/.exec(F)){return C(parseInt(E[1],10),parseInt(E[2],10),parseInt(E[3],10),parseFloat(E[4]))}if(E=/rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(F)){return C(parseFloat(E[1])*2.55,parseFloat(E[2])*2.55,parseFloat(E[3])*2.55)}if(E=/rgba\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\s*\)/.exec(F)){return 
C(parseFloat(E[1])*2.55,parseFloat(E[2])*2.55,parseFloat(E[3])*2.55,parseFloat(E[4]))}if(E=/#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(F)){return C(parseInt(E[1],16),parseInt(E[2],16),parseInt(E[3],16))}if(E=/#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(F)){return C(parseInt(E[1]+E[1],16),parseInt(E[2]+E[2],16),parseInt(E[3]+E[3],16))}var D=B.trim(F).toLowerCase();if(D=="transparent"){return C(255,255,255,0)}else{E=A[D]||[0,0,0];return C(E[0],E[1],E[2])}};var A={aqua:[0,255,255],azure:[240,255,255],beige:[245,245,220],black:[0,0,0],blue:[0,0,255],brown:[165,42,42],cyan:[0,255,255],darkblue:[0,0,139],darkcyan:[0,139,139],darkgrey:[169,169,169],darkgreen:[0,100,0],darkkhaki:[189,183,107],darkmagenta:[139,0,139],darkolivegreen:[85,107,47],darkorange:[255,140,0],darkorchid:[153,50,204],darkred:[139,0,0],darksalmon:[233,150,122],darkviolet:[148,0,211],fuchsia:[255,0,255],gold:[255,215,0],green:[0,128,0],indigo:[75,0,130],khaki:[240,230,140],lightblue:[173,216,230],lightcyan:[224,255,255],lightgreen:[144,238,144],lightgrey:[211,211,211],lightpink:[255,182,193],lightyellow:[255,255,224],lime:[0,255,0],magenta:[255,0,255],maroon:[128,0,0],navy:[0,0,128],olive:[128,128,0],orange:[255,165,0],pink:[255,192,203],purple:[128,0,128],violet:[128,0,128],red:[255,0,0],silver:[192,192,192],white:[255,255,255],yellow:[255,255,0]}})(jQuery); // the actual Flot code (function($) { // Cache the prototype hasOwnProperty for faster access var hasOwnProperty = Object.prototype.hasOwnProperty; /////////////////////////////////////////////////////////////////////////// // The Canvas object is a wrapper around an HTML5 tag. // // @constructor // @param {string} cls List of classes to apply to the canvas. // @param {element} container Element onto which to append the canvas. // // Requiring a container is a little iffy, but unfortunately canvas // operations don't work unless the canvas is attached to the DOM. 
    // Reuses an existing canvas child of `container` with class `cls`, or
    // creates (and attaches) a new one, then prepares its 2D context scaled
    // for high-DPI displays.
    function Canvas(cls, container) {

        var element = container.children("." + cls)[0];

        if (element == null) {

            element = document.createElement("canvas");
            element.className = cls;

            $(element).css({ direction: "ltr", position: "absolute", left: 0, top: 0 })
                .appendTo(container);

            // If HTML5 Canvas isn't available, fall back to [Ex|Flash]canvas

            if (!element.getContext) {
                if (window.G_vmlCanvasManager) {
                    element = window.G_vmlCanvasManager.initElement(element);
                } else {
                    throw new Error("Canvas is not available. If you're using IE with a fall-back such as Excanvas, then there's either a mistake in your conditional include, or the page has no DOCTYPE and is rendering in Quirks Mode.");
                }
            }
        }

        this.element = element;

        var context = this.context = element.getContext("2d");

        // Determine the screen's ratio of physical to device-independent
        // pixels.  This is the ratio between the canvas width that the browser
        // advertises and the number of pixels actually present in that space.

        // The iPhone 4, for example, has a device-independent width of 320px,
        // but its screen is actually 640px wide.  It therefore has a pixel
        // ratio of 2, while most normal devices have a ratio of 1.

        var devicePixelRatio = window.devicePixelRatio || 1,
            backingStoreRatio =
                context.webkitBackingStorePixelRatio ||
                context.mozBackingStorePixelRatio ||
                context.msBackingStorePixelRatio ||
                context.oBackingStorePixelRatio ||
                context.backingStorePixelRatio || 1;

        this.pixelRatio = devicePixelRatio / backingStoreRatio;

        // Size the canvas to match the internal dimensions of its container

        this.resize(container.width(), container.height());

        // Collection of HTML div layers for text overlaid onto the canvas

        this.textContainer = null;
        this.text = {};

        // Cache of text fragments and metrics, so we can avoid expensively
        // re-calculating them when the plot is re-rendered in a loop.

        this._textCache = {};
    }

    // Resizes the canvas to the given dimensions.
    //
    // @param {number} width New width of the canvas, in pixels.
// @param {number} width New height of the canvas, in pixels. Canvas.prototype.resize = function(width, height) { if (width <= 0 || height <= 0) { throw new Error("Invalid dimensions for plot, width = " + width + ", height = " + height); } var element = this.element, context = this.context, pixelRatio = this.pixelRatio; // Resize the canvas, increasing its density based on the display's // pixel ratio; basically giving it more pixels without increasing the // size of its element, to take advantage of the fact that retina // displays have that many more pixels in the same advertised space. // Resizing should reset the state (excanvas seems to be buggy though) if (this.width != width) { element.width = width * pixelRatio; element.style.width = width + "px"; this.width = width; } if (this.height != height) { element.height = height * pixelRatio; element.style.height = height + "px"; this.height = height; } // Save the context, so we can reset in case we get replotted. The // restore ensure that we're really back at the initial state, and // should be safe even if we haven't saved the initial state yet. context.restore(); context.save(); // Scale the coordinate space to match the display density; so even though we // may have twice as many pixels, we still want lines and other drawing to // appear at the same size; the extra pixels will just make them crisper. context.scale(pixelRatio, pixelRatio); }; // Clears the entire canvas area, not including any overlaid HTML text Canvas.prototype.clear = function() { this.context.clearRect(0, 0, this.width, this.height); }; // Finishes rendering the canvas, including managing the text overlay. Canvas.prototype.render = function() { var cache = this._textCache; // For each text layer, add elements marked as active that haven't // already been rendered, and remove those that are no longer active. 
for (var layerKey in cache) { if (hasOwnProperty.call(cache, layerKey)) { var layer = this.getTextLayer(layerKey), layerCache = cache[layerKey]; layer.hide(); for (var styleKey in layerCache) { if (hasOwnProperty.call(layerCache, styleKey)) { var styleCache = layerCache[styleKey]; for (var key in styleCache) { if (hasOwnProperty.call(styleCache, key)) { var info = styleCache[key]; if (info.active) { if (!info.rendered) { layer.append(info.element); info.rendered = true; } } else { delete styleCache[key]; if (info.rendered) { info.element.detach(); } } } } } } layer.show(); } } }; // Creates (if necessary) and returns the text overlay container. // // @param {string} classes String of space-separated CSS classes used to // uniquely identify the text layer. // @return {object} The jQuery-wrapped text-layer div. Canvas.prototype.getTextLayer = function(classes) { var layer = this.text[classes]; // Create the text layer if it doesn't exist if (layer == null) { // Create the text layer container, if it doesn't exist if (this.textContainer == null) { this.textContainer = $("
") .css({ position: "absolute", top: 0, left: 0, bottom: 0, right: 0, 'font-size': "smaller", color: "#545454" }) .insertAfter(this.element); } layer = this.text[classes] = $("
") .addClass(classes) .css({ position: "absolute", top: 0, left: 0, bottom: 0, right: 0 }) .appendTo(this.textContainer); } return layer; }; // Creates (if necessary) and returns a text info object. // // The object looks like this: // // { // width: Width of the text's wrapper div. // height: Height of the text's wrapper div. // active: Flag indicating whether the text should be visible. // rendered: Flag indicating whether the text is currently visible. // element: The jQuery-wrapped HTML div containing the text. // } // // Canvas maintains a cache of recently-used text info objects; getTextInfo // either returns the cached element or creates a new entry. // // @param {string} layer A string of space-separated CSS classes uniquely // identifying the layer containing this text. // @param {string} text Text string to retrieve info for. // @param {(string|object)=} font Either a string of space-separated CSS // classes or a font-spec object, defining the text's font and style. // @param {number=} angle Angle at which to rotate the text, in degrees. // Angle is currently unused, it will be implemented in the future. // @return {object} a text info object. 
Canvas.prototype.getTextInfo = function(layer, text, font, angle) { var textStyle, layerCache, styleCache, info; // Cast the value to a string, in case we were given a number or such text = "" + text; // If the font is a font-spec object, generate a CSS font definition if (typeof font === "object") { textStyle = font.style + " " + font.variant + " " + font.weight + " " + font.size + "px/" + font.lineHeight + "px " + font.family; } else { textStyle = font; } // Retrieve (or create) the cache for the text's layer and styles layerCache = this._textCache[layer]; if (layerCache == null) { layerCache = this._textCache[layer] = {}; } styleCache = layerCache[textStyle]; if (styleCache == null) { styleCache = layerCache[textStyle] = {}; } info = styleCache[text]; // If we can't find a matching element in our cache, create a new one if (info == null) { var element = $("
").html(text) .css({ position: "absolute", top: -9999 }) .appendTo(this.getTextLayer(layer)); if (typeof font === "object") { element.css({ font: textStyle, color: font.color }); } else if (typeof font === "string") { element.addClass(font); } info = styleCache[text] = { active: false, rendered: false, element: element, width: element.outerWidth(true), height: element.outerHeight(true) }; element.detach(); } return info; }; // Adds a text string to the canvas text overlay. // // The text isn't drawn immediately; it is marked as rendering, which will // result in its addition to the canvas on the next render pass. // // @param {string} layer A string of space-separated CSS classes uniquely // identifying the layer containing this text. // @param {number} x X coordinate at which to draw the text. // @param {number} y Y coordinate at which to draw the text. // @param {string} text Text string to draw. // @param {(string|object)=} font Either a string of space-separated CSS // classes or a font-spec object, defining the text's font and style. // @param {number=} angle Angle at which to rotate the text, in degrees. // Angle is currently unused, it will be implemented in the future. // @param {string=} halign Horizontal alignment of the text; either "left", // "center" or "right". // @param {string=} valign Vertical alignment of the text; either "top", // "middle" or "bottom". 
Canvas.prototype.addText = function(layer, x, y, text, font, angle, halign, valign) { var info = this.getTextInfo(layer, text, font, angle); // Mark the div for inclusion in the next render pass info.active = true; // Tweak the div's position to match the text's alignment if (halign == "center") { x -= info.width / 2; } else if (halign == "right") { x -= info.width; } if (valign == "middle") { y -= info.height / 2; } else if (valign == "bottom") { y -= info.height; } // Move the element to its final position within the container info.element.css({ top: Math.round(y), left: Math.round(x) }); }; // Removes one or more text strings from the canvas text overlay. // // If no parameters are given, all text within the layer is removed. // The text is not actually removed; it is simply marked as inactive, which // will result in its removal on the next render pass. // // @param {string} layer A string of space-separated CSS classes uniquely // identifying the layer containing this text. // @param {string} text Text string to remove. // @param {(string|object)=} font Either a string of space-separated CSS // classes or a font-spec object, defining the text's font and style. // @param {number=} angle Angle at which the text is rotated, in degrees. // Angle is currently unused, it will be implemented in the future. Canvas.prototype.removeText = function(layer, text, font, angle) { if (text == null) { var layerCache = this._textCache[layer]; if (layerCache != null) { for (var styleKey in layerCache) { if (hasOwnProperty.call(layerCache, styleKey)) { var styleCache = layerCache[styleKey] for (var key in styleCache) { if (hasOwnProperty.call(styleCache, key)) { styleCache[key].active = false; } } } } } } else { this.getTextInfo(layer, text, font, angle).active = false; } }; /////////////////////////////////////////////////////////////////////////// // The top-level container for the entire plot. 
function Plot(placeholder, data_, options_, plugins) { // data is on the form: // [ series1, series2 ... ] // where series is either just the data as [ [x1, y1], [x2, y2], ... ] // or { data: [ [x1, y1], [x2, y2], ... ], label: "some label", ... } var series = [], options = { // the color theme used for graphs colors: ["#edc240", "#afd8f8", "#cb4b4b", "#4da74d", "#9440ed"], legend: { show: true, noColumns: 1, // number of colums in legend table labelFormatter: null, // fn: string -> string labelBoxBorderColor: "#ccc", // border color for the little label boxes container: null, // container (as jQuery object) to put legend in, null means default on top of graph position: "ne", // position of default legend container within plot margin: 5, // distance from grid edge to default legend container within plot backgroundColor: null, // null means auto-detect backgroundOpacity: 0.85, // set to 0 to avoid background sorted: null // default to no legend sorting }, xaxis: { show: null, // null = auto-detect, true = always, false = never position: "bottom", // or "top" mode: null, // null or "time" font: null, // null (derived from CSS in placeholder) or object like { size: 11, lineHeight: 13, style: "italic", weight: "bold", family: "sans-serif", variant: "small-caps" } color: null, // base color, labels, ticks tickColor: null, // possibly different color of ticks, e.g. "rgba(0,0,0,0.15)" transform: null, // null or f: number -> number to transform axis inverseTransform: null, // if transform is set, this should be the inverse function min: null, // min. value to show, null means set automatically max: null, // max. value to show, null means set automatically autoscaleMargin: null, // margin in % to add if auto-setting min/max ticks: null, // either [1, 3] or [[1, "a"], 3] or (fn: axis info -> ticks) or app. 
number of ticks for auto-ticks tickFormatter: null, // fn: number -> string labelWidth: null, // size of tick labels in pixels labelHeight: null, reserveSpace: null, // whether to reserve space even if axis isn't shown tickLength: null, // size in pixels of ticks, or "full" for whole line alignTicksWithAxis: null, // axis number or null for no sync tickDecimals: null, // no. of decimals, null means auto tickSize: null, // number or [number, "unit"] minTickSize: null // number or [number, "unit"] }, yaxis: { autoscaleMargin: 0.02, position: "left" // or "right" }, xaxes: [], yaxes: [], series: { points: { show: false, radius: 3, lineWidth: 2, // in pixels fill: true, fillColor: "#ffffff", symbol: "circle" // or callback }, lines: { // we don't put in show: false so we can see // whether lines were actively disabled lineWidth: 2, // in pixels fill: false, fillColor: null, steps: false // Omit 'zero', so we can later default its value to // match that of the 'fill' option. }, bars: { show: false, lineWidth: 2, // in pixels barWidth: 1, // in units of the x axis fill: true, fillColor: null, align: "left", // "left", "right", or "center" horizontal: false, zero: true }, shadowSize: 3, highlightColor: null }, grid: { show: true, aboveData: false, color: "#545454", // primary color used for outline and labels backgroundColor: null, // null for transparent, else color borderColor: null, // set if different from the grid color tickColor: null, // color for the ticks, e.g. 
"rgba(0,0,0,0.15)" margin: 0, // distance from the canvas edge to the grid labelMargin: 5, // in pixels axisMargin: 8, // in pixels borderWidth: 2, // in pixels minBorderMargin: null, // in pixels, null means taken from points radius markings: null, // array of ranges or fn: axes -> array of ranges markingsColor: "#f4f4f4", markingsLineWidth: 2, // interactive stuff clickable: false, hoverable: false, autoHighlight: true, // highlight in case mouse is near mouseActiveRadius: 10 // how far the mouse can be away to activate an item }, interaction: { redrawOverlayInterval: 1000/60 // time between updates, -1 means in same flow }, hooks: {} }, surface = null, // the canvas for the plot itself overlay = null, // canvas for interactive stuff on top of plot eventHolder = null, // jQuery object that events should be bound to ctx = null, octx = null, xaxes = [], yaxes = [], plotOffset = { left: 0, right: 0, top: 0, bottom: 0}, plotWidth = 0, plotHeight = 0, hooks = { processOptions: [], processRawData: [], processDatapoints: [], processOffset: [], drawBackground: [], drawSeries: [], draw: [], bindEvents: [], drawOverlay: [], shutdown: [] }, plot = this; // public functions plot.setData = setData; plot.setupGrid = setupGrid; plot.draw = draw; plot.getPlaceholder = function() { return placeholder; }; plot.getCanvas = function() { return surface.element; }; plot.getPlotOffset = function() { return plotOffset; }; plot.width = function () { return plotWidth; }; plot.height = function () { return plotHeight; }; plot.offset = function () { var o = eventHolder.offset(); o.left += plotOffset.left; o.top += plotOffset.top; return o; }; plot.getData = function () { return series; }; plot.getAxes = function () { var res = {}, i; $.each(xaxes.concat(yaxes), function (_, axis) { if (axis) res[axis.direction + (axis.n != 1 ? 
axis.n : "") + "axis"] = axis; }); return res; }; plot.getXAxes = function () { return xaxes; }; plot.getYAxes = function () { return yaxes; }; plot.c2p = canvasToAxisCoords; plot.p2c = axisToCanvasCoords; plot.getOptions = function () { return options; }; plot.highlight = highlight; plot.unhighlight = unhighlight; plot.triggerRedrawOverlay = triggerRedrawOverlay; plot.pointOffset = function(point) { return { left: parseInt(xaxes[axisNumber(point, "x") - 1].p2c(+point.x) + plotOffset.left, 10), top: parseInt(yaxes[axisNumber(point, "y") - 1].p2c(+point.y) + plotOffset.top, 10) }; }; plot.shutdown = shutdown; plot.resize = function () { var width = placeholder.width(), height = placeholder.height(); surface.resize(width, height); overlay.resize(width, height); }; // public attributes plot.hooks = hooks; // initialize initPlugins(plot); parseOptions(options_); setupCanvases(); setData(data_); setupGrid(); draw(); bindEvents(); function executeHooks(hook, args) { args = [plot].concat(args); for (var i = 0; i < hook.length; ++i) hook[i].apply(this, args); } function initPlugins() { // References to key classes, allowing plugins to modify them var classes = { Canvas: Canvas }; for (var i = 0; i < plugins.length; ++i) { var p = plugins[i]; p.init(plot, classes); if (p.options) $.extend(true, options, p.options); } } function parseOptions(opts) { $.extend(true, options, opts); if (options.xaxis.color == null) options.xaxis.color = $.color.parse(options.grid.color).scale('a', 0.22).toString(); if (options.yaxis.color == null) options.yaxis.color = $.color.parse(options.grid.color).scale('a', 0.22).toString(); if (options.xaxis.tickColor == null) // grid.tickColor for back-compatibility options.xaxis.tickColor = options.grid.tickColor || options.xaxis.color; if (options.yaxis.tickColor == null) // grid.tickColor for back-compatibility options.yaxis.tickColor = options.grid.tickColor || options.yaxis.color; if (options.grid.borderColor == null) options.grid.borderColor = 
options.grid.color; if (options.grid.tickColor == null) options.grid.tickColor = $.color.parse(options.grid.color).scale('a', 0.22).toString(); // Fill in defaults for axis options, including any unspecified // font-spec fields, if a font-spec was provided. // If no x/y axis options were provided, create one of each anyway, // since the rest of the code assumes that they exist. var i, axisOptions, axisCount, fontDefaults = { style: placeholder.css("font-style"), size: Math.round(0.8 * (+placeholder.css("font-size").replace("px", "") || 13)), variant: placeholder.css("font-variant"), weight: placeholder.css("font-weight"), family: placeholder.css("font-family") }; fontDefaults.lineHeight = fontDefaults.size * 1.15; axisCount = options.xaxes.length || 1; for (i = 0; i < axisCount; ++i) { axisOptions = options.xaxes[i]; if (axisOptions && !axisOptions.tickColor) { axisOptions.tickColor = axisOptions.color; } axisOptions = $.extend(true, {}, options.xaxis, axisOptions); options.xaxes[i] = axisOptions; if (axisOptions.font) { axisOptions.font = $.extend({}, fontDefaults, axisOptions.font); if (!axisOptions.font.color) { axisOptions.font.color = axisOptions.color; } } } axisCount = options.yaxes.length || 1; for (i = 0; i < axisCount; ++i) { axisOptions = options.yaxes[i]; if (axisOptions && !axisOptions.tickColor) { axisOptions.tickColor = axisOptions.color; } axisOptions = $.extend(true, {}, options.yaxis, axisOptions); options.yaxes[i] = axisOptions; if (axisOptions.font) { axisOptions.font = $.extend({}, fontDefaults, axisOptions.font); if (!axisOptions.font.color) { axisOptions.font.color = axisOptions.color; } } } // backwards compatibility, to be removed in future if (options.xaxis.noTicks && options.xaxis.ticks == null) options.xaxis.ticks = options.xaxis.noTicks; if (options.yaxis.noTicks && options.yaxis.ticks == null) options.yaxis.ticks = options.yaxis.noTicks; if (options.x2axis) { options.xaxes[1] = $.extend(true, {}, options.xaxis, options.x2axis); 
options.xaxes[1].position = "top"; } if (options.y2axis) { options.yaxes[1] = $.extend(true, {}, options.yaxis, options.y2axis); options.yaxes[1].position = "right"; } if (options.grid.coloredAreas) options.grid.markings = options.grid.coloredAreas; if (options.grid.coloredAreasColor) options.grid.markingsColor = options.grid.coloredAreasColor; if (options.lines) $.extend(true, options.series.lines, options.lines); if (options.points) $.extend(true, options.series.points, options.points); if (options.bars) $.extend(true, options.series.bars, options.bars); if (options.shadowSize != null) options.series.shadowSize = options.shadowSize; if (options.highlightColor != null) options.series.highlightColor = options.highlightColor; // save options on axes for future reference for (i = 0; i < options.xaxes.length; ++i) getOrCreateAxis(xaxes, i + 1).options = options.xaxes[i]; for (i = 0; i < options.yaxes.length; ++i) getOrCreateAxis(yaxes, i + 1).options = options.yaxes[i]; // add hooks from options for (var n in hooks) if (options.hooks[n] && options.hooks[n].length) hooks[n] = hooks[n].concat(options.hooks[n]); executeHooks(hooks.processOptions, [options]); } function setData(d) { series = parseData(d); fillInSeriesOptions(); processData(); } function parseData(d) { var res = []; for (var i = 0; i < d.length; ++i) { var s = $.extend(true, {}, options.series); if (d[i].data != null) { s.data = d[i].data; // move the data instead of deep-copy delete d[i].data; $.extend(true, s, d[i]); d[i].data = s.data; } else s.data = d[i]; res.push(s); } return res; } function axisNumber(obj, coord) { var a = obj[coord + "axis"]; if (typeof a == "object") // if we got a real axis, extract number a = a.n; if (typeof a != "number") a = 1; // default to first axis return a; } function allAxes() { // return flat array without annoying null entries return $.grep(xaxes.concat(yaxes), function (a) { return a; }); } function canvasToAxisCoords(pos) { // return an object with x/y corresponding 
to all used axes var res = {}, i, axis; for (i = 0; i < xaxes.length; ++i) { axis = xaxes[i]; if (axis && axis.used) res["x" + axis.n] = axis.c2p(pos.left); } for (i = 0; i < yaxes.length; ++i) { axis = yaxes[i]; if (axis && axis.used) res["y" + axis.n] = axis.c2p(pos.top); } if (res.x1 !== undefined) res.x = res.x1; if (res.y1 !== undefined) res.y = res.y1; return res; } function axisToCanvasCoords(pos) { // get canvas coords from the first pair of x/y found in pos var res = {}, i, axis, key; for (i = 0; i < xaxes.length; ++i) { axis = xaxes[i]; if (axis && axis.used) { key = "x" + axis.n; if (pos[key] == null && axis.n == 1) key = "x"; if (pos[key] != null) { res.left = axis.p2c(pos[key]); break; } } } for (i = 0; i < yaxes.length; ++i) { axis = yaxes[i]; if (axis && axis.used) { key = "y" + axis.n; if (pos[key] == null && axis.n == 1) key = "y"; if (pos[key] != null) { res.top = axis.p2c(pos[key]); break; } } } return res; } function getOrCreateAxis(axes, number) { if (!axes[number - 1]) axes[number - 1] = { n: number, // save the number for future reference direction: axes == xaxes ? "x" : "y", options: $.extend(true, {}, axes == xaxes ? options.xaxis : options.yaxis) }; return axes[number - 1]; } function fillInSeriesOptions() { var neededColors = series.length, maxIndex = -1, i; // Subtract the number of series that already have fixed colors or // color indexes from the number that we still need to generate. for (i = 0; i < series.length; ++i) { var sc = series[i].color; if (sc != null) { neededColors--; if (typeof sc == "number" && sc > maxIndex) { maxIndex = sc; } } } // If any of the series have fixed color indexes, then we need to // generate at least as many colors as the highest index. if (neededColors <= maxIndex) { neededColors = maxIndex + 1; } // Generate all the colors, using first the option colors and then // variations on those colors once they're exhausted. 
var c, colors = [], colorPool = options.colors, colorPoolSize = colorPool.length, variation = 0; for (i = 0; i < neededColors; i++) { c = $.color.parse(colorPool[i % colorPoolSize] || "#666"); // Each time we exhaust the colors in the pool we adjust // a scaling factor used to produce more variations on // those colors. The factor alternates negative/positive // to produce lighter/darker colors. // Reset the variation after every few cycles, or else // it will end up producing only white or black colors. if (i % colorPoolSize == 0 && i) { if (variation >= 0) { if (variation < 0.5) { variation = -variation - 0.2; } else variation = 0; } else variation = -variation; } colors[i] = c.scale('rgb', 1 + variation); } // Finalize the series options, filling in their colors var colori = 0, s; for (i = 0; i < series.length; ++i) { s = series[i]; // assign colors if (s.color == null) { s.color = colors[colori].toString(); ++colori; } else if (typeof s.color == "number") s.color = colors[s.color].toString(); // turn on lines automatically in case nothing is set if (s.lines.show == null) { var v, show = true; for (v in s) if (s[v] && s[v].show) { show = false; break; } if (show) s.lines.show = true; } // If nothing was provided for lines.zero, default it to match // lines.fill, since areas by default should extend to zero. 
if (s.lines.zero == null) { s.lines.zero = !!s.lines.fill; } // setup axes s.xaxis = getOrCreateAxis(xaxes, axisNumber(s, "x")); s.yaxis = getOrCreateAxis(yaxes, axisNumber(s, "y")); } } function processData() { var topSentry = Number.POSITIVE_INFINITY, bottomSentry = Number.NEGATIVE_INFINITY, fakeInfinity = Number.MAX_VALUE, i, j, k, m, length, s, points, ps, x, y, axis, val, f, p, data, format; function updateAxis(axis, min, max) { if (min < axis.datamin && min != -fakeInfinity) axis.datamin = min; if (max > axis.datamax && max != fakeInfinity) axis.datamax = max; } $.each(allAxes(), function (_, axis) { // init axis axis.datamin = topSentry; axis.datamax = bottomSentry; axis.used = false; }); for (i = 0; i < series.length; ++i) { s = series[i]; s.datapoints = { points: [] }; executeHooks(hooks.processRawData, [ s, s.data, s.datapoints ]); } // first pass: clean and copy data for (i = 0; i < series.length; ++i) { s = series[i]; data = s.data; format = s.datapoints.format; if (!format) { format = []; // find out how to copy format.push({ x: true, number: true, required: true }); format.push({ y: true, number: true, required: true }); if (s.bars.show || (s.lines.show && s.lines.fill)) { var autoscale = !!((s.bars.show && s.bars.zero) || (s.lines.show && s.lines.zero)); format.push({ y: true, number: true, required: false, defaultValue: 0, autoscale: autoscale }); if (s.bars.horizontal) { delete format[format.length - 1].y; format[format.length - 1].x = true; } } s.datapoints.format = format; } if (s.datapoints.pointsize != null) continue; // already filled in s.datapoints.pointsize = format.length; ps = s.datapoints.pointsize; points = s.datapoints.points; var insertSteps = s.lines.show && s.lines.steps; s.xaxis.used = s.yaxis.used = true; for (j = k = 0; j < data.length; ++j, k += ps) { p = data[j]; var nullify = p == null; if (!nullify) { for (m = 0; m < ps; ++m) { val = p[m]; f = format[m]; if (f) { if (f.number && val != null) { val = +val; // convert to number 
if (isNaN(val)) val = null; else if (val == Infinity) val = fakeInfinity; else if (val == -Infinity) val = -fakeInfinity; } if (val == null) { if (f.required) nullify = true; if (f.defaultValue != null) val = f.defaultValue; } } points[k + m] = val; } } if (nullify) { for (m = 0; m < ps; ++m) { val = points[k + m]; if (val != null) { f = format[m]; // extract min/max info if (f.x) updateAxis(s.xaxis, val, val); if (f.y) updateAxis(s.yaxis, val, val); } points[k + m] = null; } } else { // a little bit of line specific stuff that // perhaps shouldn't be here, but lacking // better means... if (insertSteps && k > 0 && points[k - ps] != null && points[k - ps] != points[k] && points[k - ps + 1] != points[k + 1]) { // copy the point to make room for a middle point for (m = 0; m < ps; ++m) points[k + ps + m] = points[k + m]; // middle point has same y points[k + 1] = points[k - ps + 1]; // we've added a point, better reflect that k += ps; } } } } // give the hooks a chance to run for (i = 0; i < series.length; ++i) { s = series[i]; executeHooks(hooks.processDatapoints, [ s, s.datapoints]); } // second pass: find datamax/datamin for auto-scaling for (i = 0; i < series.length; ++i) { s = series[i]; points = s.datapoints.points, ps = s.datapoints.pointsize; format = s.datapoints.format; var xmin = topSentry, ymin = topSentry, xmax = bottomSentry, ymax = bottomSentry; for (j = 0; j < points.length; j += ps) { if (points[j] == null) continue; for (m = 0; m < ps; ++m) { val = points[j + m]; f = format[m]; if (!f || f.autoscale === false || val == fakeInfinity || val == -fakeInfinity) continue; if (f.x) { if (val < xmin) xmin = val; if (val > xmax) xmax = val; } if (f.y) { if (val < ymin) ymin = val; if (val > ymax) ymax = val; } } } if (s.bars.show) { // make sure we got room for the bar on the dancing floor var delta; switch (s.bars.align) { case "left": delta = 0; break; case "right": delta = -s.bars.barWidth; break; case "center": delta = -s.bars.barWidth / 2; break; 
default: throw new Error("Invalid bar alignment: " + s.bars.align); } if (s.bars.horizontal) { ymin += delta; ymax += delta + s.bars.barWidth; } else { xmin += delta; xmax += delta + s.bars.barWidth; } } updateAxis(s.xaxis, xmin, xmax); updateAxis(s.yaxis, ymin, ymax); } $.each(allAxes(), function (_, axis) { if (axis.datamin == topSentry) axis.datamin = null; if (axis.datamax == bottomSentry) axis.datamax = null; }); } function setupCanvases() { // Make sure the placeholder is clear of everything except canvases // from a previous plot in this container that we'll try to re-use. placeholder.css("padding", 0) // padding messes up the positioning .children(":not(.flot-base,.flot-overlay)").remove(); if (placeholder.css("position") == 'static') placeholder.css("position", "relative"); // for positioning labels and overlay surface = new Canvas("flot-base", placeholder); overlay = new Canvas("flot-overlay", placeholder); // overlay canvas for interactive features ctx = surface.context; octx = overlay.context; // define which element we're listening for events on eventHolder = $(overlay.element).unbind(); // If we're re-using a plot object, shut down the old one var existing = placeholder.data("plot"); if (existing) { existing.shutdown(); overlay.clear(); } // save in case we get replotted placeholder.data("plot", plot); } function bindEvents() { // bind events if (options.grid.hoverable) { eventHolder.mousemove(onMouseMove); // Use bind, rather than .mouseleave, because we officially // still support jQuery 1.2.6, which doesn't define a shortcut // for mouseenter or mouseleave. This was a bug/oversight that // was fixed somewhere around 1.3.x. We can return to using // .mouseleave when we drop support for 1.2.6. 
eventHolder.bind("mouseleave", onMouseLeave); } if (options.grid.clickable) eventHolder.click(onClick); executeHooks(hooks.bindEvents, [eventHolder]); } function shutdown() { if (redrawTimeout) clearTimeout(redrawTimeout); eventHolder.unbind("mousemove", onMouseMove); eventHolder.unbind("mouseleave", onMouseLeave); eventHolder.unbind("click", onClick); executeHooks(hooks.shutdown, [eventHolder]); } function setTransformationHelpers(axis) { // set helper functions on the axis, assumes plot area // has been computed already function identity(x) { return x; } var s, m, t = axis.options.transform || identity, it = axis.options.inverseTransform; // precompute how much the axis is scaling a point // in canvas space if (axis.direction == "x") { s = axis.scale = plotWidth / Math.abs(t(axis.max) - t(axis.min)); m = Math.min(t(axis.max), t(axis.min)); } else { s = axis.scale = plotHeight / Math.abs(t(axis.max) - t(axis.min)); s = -s; m = Math.max(t(axis.max), t(axis.min)); } // data point to canvas coordinate if (t == identity) // slight optimization axis.p2c = function (p) { return (p - m) * s; }; else axis.p2c = function (p) { return (t(p) - m) * s; }; // canvas coordinate to data point if (!it) axis.c2p = function (c) { return m + c / s; }; else axis.c2p = function (c) { return it(m + c / s); }; } function measureTickLabels(axis) { var opts = axis.options, ticks = axis.ticks || [], axisw = opts.labelWidth || 0, axish = opts.labelHeight || 0, legacyStyles = axis.direction + "Axis " + axis.direction + axis.n + "Axis", layer = "flot-" + axis.direction + "-axis flot-" + axis.direction + axis.n + "-axis " + legacyStyles, font = opts.font || "flot-tick-label tickLabel"; for (var i = 0; i < ticks.length; ++i) { var t = ticks[i]; if (!t.label) continue; var info = surface.getTextInfo(layer, t.label, font); if (opts.labelWidth == null) axisw = Math.max(axisw, info.width); if (opts.labelHeight == null) axish = Math.max(axish, info.height); } axis.labelWidth = Math.ceil(axisw); 
axis.labelHeight = Math.ceil(axish); } function allocateAxisBoxFirstPhase(axis) { // find the bounding box of the axis by looking at label // widths/heights and ticks, make room by diminishing the // plotOffset; this first phase only looks at one // dimension per axis, the other dimension depends on the // other axes so will have to wait var lw = axis.labelWidth, lh = axis.labelHeight, pos = axis.options.position, tickLength = axis.options.tickLength, axisMargin = options.grid.axisMargin, padding = options.grid.labelMargin, all = axis.direction == "x" ? xaxes : yaxes, index, innermost; // determine axis margin var samePosition = $.grep(all, function (a) { return a && a.options.position == pos && a.reserveSpace; }); if ($.inArray(axis, samePosition) == samePosition.length - 1) axisMargin = 0; // outermost // determine tick length - if we're innermost, we can use "full" if (tickLength == null) { var sameDirection = $.grep(all, function (a) { return a && a.reserveSpace; }); innermost = $.inArray(axis, sameDirection) == 0; if (innermost) tickLength = "full"; else tickLength = 5; } if (!isNaN(+tickLength)) padding += +tickLength; // compute box if (axis.direction == "x") { lh += padding; if (pos == "bottom") { plotOffset.bottom += lh + axisMargin; axis.box = { top: surface.height - plotOffset.bottom, height: lh }; } else { axis.box = { top: plotOffset.top + axisMargin, height: lh }; plotOffset.top += lh + axisMargin; } } else { lw += padding; if (pos == "left") { axis.box = { left: plotOffset.left + axisMargin, width: lw }; plotOffset.left += lw + axisMargin; } else { plotOffset.right += lw + axisMargin; axis.box = { left: surface.width - plotOffset.right, width: lw }; } } // save for future reference axis.position = pos; axis.tickLength = tickLength; axis.box.padding = padding; axis.innermost = innermost; } function allocateAxisBoxSecondPhase(axis) { // now that all axis boxes have been placed in one // dimension, we can set the remaining dimension coordinates if 
(axis.direction == "x") { axis.box.left = plotOffset.left - axis.labelWidth / 2; axis.box.width = surface.width - plotOffset.left - plotOffset.right + axis.labelWidth; } else { axis.box.top = plotOffset.top - axis.labelHeight / 2; axis.box.height = surface.height - plotOffset.bottom - plotOffset.top + axis.labelHeight; } } function adjustLayoutForThingsStickingOut() { // possibly adjust plot offset to ensure everything stays // inside the canvas and isn't clipped off var minMargin = options.grid.minBorderMargin, margins = { x: 0, y: 0 }, i, axis; // check stuff from the plot (FIXME: this should just read // a value from the series, otherwise it's impossible to // customize) if (minMargin == null) { minMargin = 0; for (i = 0; i < series.length; ++i) minMargin = Math.max(minMargin, 2 * (series[i].points.radius + series[i].points.lineWidth/2)); } margins.x = margins.y = Math.ceil(minMargin); // check axis labels, note we don't check the actual // labels but instead use the overall width/height to not // jump as much around with replots $.each(allAxes(), function (_, axis) { var dir = axis.direction; if (axis.reserveSpace) margins[dir] = Math.ceil(Math.max(margins[dir], (dir == "x" ? axis.labelWidth : axis.labelHeight) / 2)); }); plotOffset.left = Math.max(margins.x, plotOffset.left); plotOffset.right = Math.max(margins.x, plotOffset.right); plotOffset.top = Math.max(margins.y, plotOffset.top); plotOffset.bottom = Math.max(margins.y, plotOffset.bottom); } function setupGrid() { var i, axes = allAxes(), showGrid = options.grid.show; // Initialize the plot's offset from the edge of the canvas for (var a in plotOffset) { var margin = options.grid.margin || 0; plotOffset[a] = typeof margin == "number" ? margin : margin[a] || 0; } executeHooks(hooks.processOffset, [plotOffset]); // If the grid is visible, add its border width to the offset for (var a in plotOffset) { if(typeof(options.grid.borderWidth) == "object") { plotOffset[a] += showGrid ? 
options.grid.borderWidth[a] : 0; } else { plotOffset[a] += showGrid ? options.grid.borderWidth : 0; } } // init axes $.each(axes, function (_, axis) { axis.show = axis.options.show; if (axis.show == null) axis.show = axis.used; // by default an axis is visible if it's got data axis.reserveSpace = axis.show || axis.options.reserveSpace; setRange(axis); }); if (showGrid) { var allocatedAxes = $.grep(axes, function (axis) { return axis.reserveSpace; }); $.each(allocatedAxes, function (_, axis) { // make the ticks setupTickGeneration(axis); setTicks(axis); snapRangeToTicks(axis, axis.ticks); // find labelWidth/Height for axis measureTickLabels(axis); }); // with all dimensions calculated, we can compute the // axis bounding boxes, start from the outside // (reverse order) for (i = allocatedAxes.length - 1; i >= 0; --i) allocateAxisBoxFirstPhase(allocatedAxes[i]); // make sure we've got enough space for things that // might stick out adjustLayoutForThingsStickingOut(); $.each(allocatedAxes, function (_, axis) { allocateAxisBoxSecondPhase(axis); }); } plotWidth = surface.width - plotOffset.left - plotOffset.right; plotHeight = surface.height - plotOffset.bottom - plotOffset.top; // now we got the proper plot dimensions, we can compute the scaling $.each(axes, function (_, axis) { setTransformationHelpers(axis); }); if (showGrid) { drawAxisLabels(); } insertLegend(); } function setRange(axis) { var opts = axis.options, min = +(opts.min != null ? opts.min : axis.datamin), max = +(opts.max != null ? opts.max : axis.datamax), delta = max - min; if (delta == 0.0) { // degenerate case var widen = max == 0 ? 
1 : 0.01; if (opts.min == null) min -= widen; // always widen max if we couldn't widen min to ensure we // don't fall into min == max which doesn't work if (opts.max == null || opts.min != null) max += widen; } else { // consider autoscaling var margin = opts.autoscaleMargin; if (margin != null) { if (opts.min == null) { min -= delta * margin; // make sure we don't go below zero if all values // are positive if (min < 0 && axis.datamin != null && axis.datamin >= 0) min = 0; } if (opts.max == null) { max += delta * margin; if (max > 0 && axis.datamax != null && axis.datamax <= 0) max = 0; } } } axis.min = min; axis.max = max; } function setupTickGeneration(axis) { var opts = axis.options; // estimate number of ticks var noTicks; if (typeof opts.ticks == "number" && opts.ticks > 0) noTicks = opts.ticks; else // heuristic based on the model a*sqrt(x) fitted to // some data points that seemed reasonable noTicks = 0.3 * Math.sqrt(axis.direction == "x" ? surface.width : surface.height); var delta = (axis.max - axis.min) / noTicks, dec = -Math.floor(Math.log(delta) / Math.LN10), maxDec = opts.tickDecimals; if (maxDec != null && dec > maxDec) { dec = maxDec; } var magn = Math.pow(10, -dec), norm = delta / magn, // norm is between 1.0 and 10.0 size; if (norm < 1.5) { size = 1; } else if (norm < 3) { size = 2; // special case for 2.5, requires an extra decimal if (norm > 2.25 && (maxDec == null || dec + 1 <= maxDec)) { size = 2.5; ++dec; } } else if (norm < 7.5) { size = 5; } else { size = 10; } size *= magn; if (opts.minTickSize != null && size < opts.minTickSize) { size = opts.minTickSize; } axis.delta = delta; axis.tickDecimals = Math.max(0, maxDec != null ? maxDec : dec); axis.tickSize = opts.tickSize || size; // Time mode was moved to a plug-in in 0.8, but since so many people use this // we'll add an especially friendly make sure they remembered to include it. 
if (opts.mode == "time" && !axis.tickGenerator) { throw new Error("Time mode requires the flot.time plugin."); } // Flot supports base-10 axes; any other mode else is handled by a plug-in, // like flot.time.js. if (!axis.tickGenerator) { axis.tickGenerator = function (axis) { var ticks = [], start = floorInBase(axis.min, axis.tickSize), i = 0, v = Number.NaN, prev; do { prev = v; v = start + i * axis.tickSize; ticks.push(v); ++i; } while (v < axis.max && v != prev); return ticks; }; axis.tickFormatter = function (value, axis) { var factor = axis.tickDecimals ? Math.pow(10, axis.tickDecimals) : 1; var formatted = "" + Math.round(value * factor) / factor; // If tickDecimals was specified, ensure that we have exactly that // much precision; otherwise default to the value's own precision. if (axis.tickDecimals != null) { var decimal = formatted.indexOf("."); var precision = decimal == -1 ? 0 : formatted.length - decimal - 1; if (precision < axis.tickDecimals) { return (precision ? formatted : formatted + ".") + ("" + factor).substr(1, axis.tickDecimals - precision); } } return formatted; }; } if ($.isFunction(opts.tickFormatter)) axis.tickFormatter = function (v, axis) { return "" + opts.tickFormatter(v, axis); }; if (opts.alignTicksWithAxis != null) { var otherAxis = (axis.direction == "x" ? 
xaxes : yaxes)[opts.alignTicksWithAxis - 1]; if (otherAxis && otherAxis.used && otherAxis != axis) { // consider snapping min/max to outermost nice ticks var niceTicks = axis.tickGenerator(axis); if (niceTicks.length > 0) { if (opts.min == null) axis.min = Math.min(axis.min, niceTicks[0]); if (opts.max == null && niceTicks.length > 1) axis.max = Math.max(axis.max, niceTicks[niceTicks.length - 1]); } axis.tickGenerator = function (axis) { // copy ticks, scaled to this axis var ticks = [], v, i; for (i = 0; i < otherAxis.ticks.length; ++i) { v = (otherAxis.ticks[i].v - otherAxis.min) / (otherAxis.max - otherAxis.min); v = axis.min + v * (axis.max - axis.min); ticks.push(v); } return ticks; }; // we might need an extra decimal since forced // ticks don't necessarily fit naturally if (!axis.mode && opts.tickDecimals == null) { var extraDec = Math.max(0, -Math.floor(Math.log(axis.delta) / Math.LN10) + 1), ts = axis.tickGenerator(axis); // only proceed if the tick interval rounded // with an extra decimal doesn't give us a // zero at end if (!(ts.length > 1 && /\..*0$/.test((ts[1] - ts[0]).toFixed(extraDec)))) axis.tickDecimals = extraDec; } } } } function setTicks(axis) { var oticks = axis.options.ticks, ticks = []; if (oticks == null || (typeof oticks == "number" && oticks > 0)) ticks = axis.tickGenerator(axis); else if (oticks) { if ($.isFunction(oticks)) // generate the ticks ticks = oticks(axis); else ticks = oticks; } // clean up/labelify the supplied ticks, copy them over var i, v; axis.ticks = []; for (i = 0; i < ticks.length; ++i) { var label = null; var t = ticks[i]; if (typeof t == "object") { v = +t[0]; if (t.length > 1) label = t[1]; } else v = +t; if (label == null) label = axis.tickFormatter(v, axis); if (!isNaN(v)) axis.ticks.push({ v: v, label: label }); } } function snapRangeToTicks(axis, ticks) { if (axis.options.autoscaleMargin && ticks.length > 0) { // snap to ticks if (axis.options.min == null) axis.min = Math.min(axis.min, ticks[0].v); if 
(axis.options.max == null && ticks.length > 1) axis.max = Math.max(axis.max, ticks[ticks.length - 1].v); } } function draw() { surface.clear(); executeHooks(hooks.drawBackground, [ctx]); var grid = options.grid; // draw background, if any if (grid.show && grid.backgroundColor) drawBackground(); if (grid.show && !grid.aboveData) { drawGrid(); } for (var i = 0; i < series.length; ++i) { executeHooks(hooks.drawSeries, [ctx, series[i]]); drawSeries(series[i]); } executeHooks(hooks.draw, [ctx]); if (grid.show && grid.aboveData) { drawGrid(); } surface.render(); } function extractRange(ranges, coord) { var axis, from, to, key, axes = allAxes(); for (var i = 0; i < axes.length; ++i) { axis = axes[i]; if (axis.direction == coord) { key = coord + axis.n + "axis"; if (!ranges[key] && axis.n == 1) key = coord + "axis"; // support x1axis as xaxis if (ranges[key]) { from = ranges[key].from; to = ranges[key].to; break; } } } // backwards-compat stuff - to be removed in future if (!ranges[key]) { axis = coord == "x" ? xaxes[0] : yaxes[0]; from = ranges[coord + "1"]; to = ranges[coord + "2"]; } // auto-reverse as an added bonus if (from != null && to != null && from > to) { var tmp = from; from = to; to = tmp; } return { from: from, to: to, axis: axis }; } function drawBackground() { ctx.save(); ctx.translate(plotOffset.left, plotOffset.top); ctx.fillStyle = getColorOrGradient(options.grid.backgroundColor, plotHeight, 0, "rgba(255, 255, 255, 0)"); ctx.fillRect(0, 0, plotWidth, plotHeight); ctx.restore(); } function drawGrid() { var i, axes, bw, bc; ctx.save(); ctx.translate(plotOffset.left, plotOffset.top); // draw markings var markings = options.grid.markings; if (markings) { if ($.isFunction(markings)) { axes = plot.getAxes(); // xmin etc. 
is backwards compatibility, to be // removed in the future axes.xmin = axes.xaxis.min; axes.xmax = axes.xaxis.max; axes.ymin = axes.yaxis.min; axes.ymax = axes.yaxis.max; markings = markings(axes); } for (i = 0; i < markings.length; ++i) { var m = markings[i], xrange = extractRange(m, "x"), yrange = extractRange(m, "y"); // fill in missing if (xrange.from == null) xrange.from = xrange.axis.min; if (xrange.to == null) xrange.to = xrange.axis.max; if (yrange.from == null) yrange.from = yrange.axis.min; if (yrange.to == null) yrange.to = yrange.axis.max; // clip if (xrange.to < xrange.axis.min || xrange.from > xrange.axis.max || yrange.to < yrange.axis.min || yrange.from > yrange.axis.max) continue; xrange.from = Math.max(xrange.from, xrange.axis.min); xrange.to = Math.min(xrange.to, xrange.axis.max); yrange.from = Math.max(yrange.from, yrange.axis.min); yrange.to = Math.min(yrange.to, yrange.axis.max); if (xrange.from == xrange.to && yrange.from == yrange.to) continue; // then draw xrange.from = xrange.axis.p2c(xrange.from); xrange.to = xrange.axis.p2c(xrange.to); yrange.from = yrange.axis.p2c(yrange.from); yrange.to = yrange.axis.p2c(yrange.to); if (xrange.from == xrange.to || yrange.from == yrange.to) { // draw line ctx.beginPath(); ctx.strokeStyle = m.color || options.grid.markingsColor; ctx.lineWidth = m.lineWidth || options.grid.markingsLineWidth; ctx.moveTo(xrange.from, yrange.from); ctx.lineTo(xrange.to, yrange.to); ctx.stroke(); } else { // fill area ctx.fillStyle = m.color || options.grid.markingsColor; ctx.fillRect(xrange.from, yrange.to, xrange.to - xrange.from, yrange.from - yrange.to); } } } // draw the ticks axes = allAxes(); bw = options.grid.borderWidth; for (var j = 0; j < axes.length; ++j) { var axis = axes[j], box = axis.box, t = axis.tickLength, x, y, xoff, yoff; if (!axis.show || axis.ticks.length == 0) continue; ctx.lineWidth = 1; // find the edges if (axis.direction == "x") { x = 0; if (t == "full") y = (axis.position == "top" ? 
0 : plotHeight); else y = box.top - plotOffset.top + (axis.position == "top" ? box.height : 0); } else { y = 0; if (t == "full") x = (axis.position == "left" ? 0 : plotWidth); else x = box.left - plotOffset.left + (axis.position == "left" ? box.width : 0); } // draw tick bar if (!axis.innermost) { ctx.strokeStyle = axis.options.color; ctx.beginPath(); xoff = yoff = 0; if (axis.direction == "x") xoff = plotWidth + 1; else yoff = plotHeight + 1; if (ctx.lineWidth == 1) { if (axis.direction == "x") { y = Math.floor(y) + 0.5; } else { x = Math.floor(x) + 0.5; } } ctx.moveTo(x, y); ctx.lineTo(x + xoff, y + yoff); ctx.stroke(); } // draw ticks ctx.strokeStyle = axis.options.tickColor; ctx.beginPath(); for (i = 0; i < axis.ticks.length; ++i) { var v = axis.ticks[i].v; xoff = yoff = 0; if (isNaN(v) || v < axis.min || v > axis.max // skip those lying on the axes if we got a border || (t == "full" && ((typeof bw == "object" && bw[axis.position] > 0) || bw > 0) && (v == axis.min || v == axis.max))) continue; if (axis.direction == "x") { x = axis.p2c(v); yoff = t == "full" ? -plotHeight : t; if (axis.position == "top") yoff = -yoff; } else { y = axis.p2c(v); xoff = t == "full" ? 
-plotWidth : t; if (axis.position == "left") xoff = -xoff; } if (ctx.lineWidth == 1) { if (axis.direction == "x") x = Math.floor(x) + 0.5; else y = Math.floor(y) + 0.5; } ctx.moveTo(x, y); ctx.lineTo(x + xoff, y + yoff); } ctx.stroke(); } // draw border if (bw) { // If either borderWidth or borderColor is an object, then draw the border // line by line instead of as one rectangle bc = options.grid.borderColor; if(typeof bw == "object" || typeof bc == "object") { if (typeof bw !== "object") { bw = {top: bw, right: bw, bottom: bw, left: bw}; } if (typeof bc !== "object") { bc = {top: bc, right: bc, bottom: bc, left: bc}; } if (bw.top > 0) { ctx.strokeStyle = bc.top; ctx.lineWidth = bw.top; ctx.beginPath(); ctx.moveTo(0 - bw.left, 0 - bw.top/2); ctx.lineTo(plotWidth, 0 - bw.top/2); ctx.stroke(); } if (bw.right > 0) { ctx.strokeStyle = bc.right; ctx.lineWidth = bw.right; ctx.beginPath(); ctx.moveTo(plotWidth + bw.right / 2, 0 - bw.top); ctx.lineTo(plotWidth + bw.right / 2, plotHeight); ctx.stroke(); } if (bw.bottom > 0) { ctx.strokeStyle = bc.bottom; ctx.lineWidth = bw.bottom; ctx.beginPath(); ctx.moveTo(plotWidth + bw.right, plotHeight + bw.bottom / 2); ctx.lineTo(0, plotHeight + bw.bottom / 2); ctx.stroke(); } if (bw.left > 0) { ctx.strokeStyle = bc.left; ctx.lineWidth = bw.left; ctx.beginPath(); ctx.moveTo(0 - bw.left/2, plotHeight + bw.bottom); ctx.lineTo(0- bw.left/2, 0); ctx.stroke(); } } else { ctx.lineWidth = bw; ctx.strokeStyle = options.grid.borderColor; ctx.strokeRect(-bw/2, -bw/2, plotWidth + bw, plotHeight + bw); } } ctx.restore(); } function drawAxisLabels() { $.each(allAxes(), function (_, axis) { if (!axis.show || axis.ticks.length == 0) return; var box = axis.box, legacyStyles = axis.direction + "Axis " + axis.direction + axis.n + "Axis", layer = "flot-" + axis.direction + "-axis flot-" + axis.direction + axis.n + "-axis " + legacyStyles, font = axis.options.font || "flot-tick-label tickLabel", tick, x, y, halign, valign; surface.removeText(layer); for 
(var i = 0; i < axis.ticks.length; ++i) { tick = axis.ticks[i]; if (!tick.label || tick.v < axis.min || tick.v > axis.max) continue; if (axis.direction == "x") { halign = "center"; x = plotOffset.left + axis.p2c(tick.v); if (axis.position == "bottom") { y = box.top + box.padding; } else { y = box.top + box.height - box.padding; valign = "bottom"; } } else { valign = "middle"; y = plotOffset.top + axis.p2c(tick.v); if (axis.position == "left") { x = box.left + box.width - box.padding; halign = "right"; } else { x = box.left + box.padding; } } surface.addText(layer, x, y, tick.label, font, null, halign, valign); } }); } function drawSeries(series) { if (series.lines.show) drawSeriesLines(series); if (series.bars.show) drawSeriesBars(series); if (series.points.show) drawSeriesPoints(series); } function drawSeriesLines(series) { function plotLine(datapoints, xoffset, yoffset, axisx, axisy) { var points = datapoints.points, ps = datapoints.pointsize, prevx = null, prevy = null; ctx.beginPath(); for (var i = ps; i < points.length; i += ps) { var x1 = points[i - ps], y1 = points[i - ps + 1], x2 = points[i], y2 = points[i + 1]; if (x1 == null || x2 == null) continue; // clip with ymin if (y1 <= y2 && y1 < axisy.min) { if (y2 < axisy.min) continue; // line segment is outside // compute new intersection point x1 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; y1 = axisy.min; } else if (y2 <= y1 && y2 < axisy.min) { if (y1 < axisy.min) continue; x2 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; y2 = axisy.min; } // clip with ymax if (y1 >= y2 && y1 > axisy.max) { if (y2 > axisy.max) continue; x1 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; y1 = axisy.max; } else if (y2 >= y1 && y2 > axisy.max) { if (y1 > axisy.max) continue; x2 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; y2 = axisy.max; } // clip with xmin if (x1 <= x2 && x1 < axisx.min) { if (x2 < axisx.min) continue; y1 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; x1 = axisx.min; } else if (x2 <= x1 && x2 < 
axisx.min) { if (x1 < axisx.min) continue; y2 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; x2 = axisx.min; } // clip with xmax if (x1 >= x2 && x1 > axisx.max) { if (x2 > axisx.max) continue; y1 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; x1 = axisx.max; } else if (x2 >= x1 && x2 > axisx.max) { if (x1 > axisx.max) continue; y2 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; x2 = axisx.max; } if (x1 != prevx || y1 != prevy) ctx.moveTo(axisx.p2c(x1) + xoffset, axisy.p2c(y1) + yoffset); prevx = x2; prevy = y2; ctx.lineTo(axisx.p2c(x2) + xoffset, axisy.p2c(y2) + yoffset); } ctx.stroke(); } function plotLineArea(datapoints, axisx, axisy) { var points = datapoints.points, ps = datapoints.pointsize, bottom = Math.min(Math.max(0, axisy.min), axisy.max), i = 0, top, areaOpen = false, ypos = 1, segmentStart = 0, segmentEnd = 0; // we process each segment in two turns, first forward // direction to sketch out top, then once we hit the // end we go backwards to sketch the bottom while (true) { if (ps > 0 && i > points.length + ps) break; i += ps; // ps is negative if going backwards var x1 = points[i - ps], y1 = points[i - ps + ypos], x2 = points[i], y2 = points[i + ypos]; if (areaOpen) { if (ps > 0 && x1 != null && x2 == null) { // at turning point segmentEnd = i; ps = -ps; ypos = 2; continue; } if (ps < 0 && i == segmentStart + ps) { // done with the reverse sweep ctx.fill(); areaOpen = false; ps = -ps; ypos = 1; i = segmentStart = segmentEnd + ps; continue; } } if (x1 == null || x2 == null) continue; // clip x values // clip with xmin if (x1 <= x2 && x1 < axisx.min) { if (x2 < axisx.min) continue; y1 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; x1 = axisx.min; } else if (x2 <= x1 && x2 < axisx.min) { if (x1 < axisx.min) continue; y2 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; x2 = axisx.min; } // clip with xmax if (x1 >= x2 && x1 > axisx.max) { if (x2 > axisx.max) continue; y1 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; x1 = axisx.max; } else if (x2 
>= x1 && x2 > axisx.max) { if (x1 > axisx.max) continue; y2 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; x2 = axisx.max; } if (!areaOpen) { // open area ctx.beginPath(); ctx.moveTo(axisx.p2c(x1), axisy.p2c(bottom)); areaOpen = true; } // now first check the case where both is outside if (y1 >= axisy.max && y2 >= axisy.max) { ctx.lineTo(axisx.p2c(x1), axisy.p2c(axisy.max)); ctx.lineTo(axisx.p2c(x2), axisy.p2c(axisy.max)); continue; } else if (y1 <= axisy.min && y2 <= axisy.min) { ctx.lineTo(axisx.p2c(x1), axisy.p2c(axisy.min)); ctx.lineTo(axisx.p2c(x2), axisy.p2c(axisy.min)); continue; } // else it's a bit more complicated, there might // be a flat maxed out rectangle first, then a // triangular cutout or reverse; to find these // keep track of the current x values var x1old = x1, x2old = x2; // clip the y values, without shortcutting, we // go through all cases in turn // clip with ymin if (y1 <= y2 && y1 < axisy.min && y2 >= axisy.min) { x1 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; y1 = axisy.min; } else if (y2 <= y1 && y2 < axisy.min && y1 >= axisy.min) { x2 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; y2 = axisy.min; } // clip with ymax if (y1 >= y2 && y1 > axisy.max && y2 <= axisy.max) { x1 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; y1 = axisy.max; } else if (y2 >= y1 && y2 > axisy.max && y1 <= axisy.max) { x2 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; y2 = axisy.max; } // if the x value was changed we got a rectangle // to fill if (x1 != x1old) { ctx.lineTo(axisx.p2c(x1old), axisy.p2c(y1)); // it goes to (x1, y1), but we fill that below } // fill triangular section, this sometimes result // in redundant points if (x1, y1) hasn't changed // from previous line to, but we just ignore that ctx.lineTo(axisx.p2c(x1), axisy.p2c(y1)); ctx.lineTo(axisx.p2c(x2), axisy.p2c(y2)); // fill the other rectangle if it's there if (x2 != x2old) { ctx.lineTo(axisx.p2c(x2), axisy.p2c(y2)); ctx.lineTo(axisx.p2c(x2old), axisy.p2c(y2)); } } } ctx.save(); 
ctx.translate(plotOffset.left, plotOffset.top); ctx.lineJoin = "round"; var lw = series.lines.lineWidth, sw = series.shadowSize; // FIXME: consider another form of shadow when filling is turned on if (lw > 0 && sw > 0) { // draw shadow as a thick and thin line with transparency ctx.lineWidth = sw; ctx.strokeStyle = "rgba(0,0,0,0.1)"; // position shadow at angle from the mid of line var angle = Math.PI/18; plotLine(series.datapoints, Math.sin(angle) * (lw/2 + sw/2), Math.cos(angle) * (lw/2 + sw/2), series.xaxis, series.yaxis); ctx.lineWidth = sw/2; plotLine(series.datapoints, Math.sin(angle) * (lw/2 + sw/4), Math.cos(angle) * (lw/2 + sw/4), series.xaxis, series.yaxis); } ctx.lineWidth = lw; ctx.strokeStyle = series.color; var fillStyle = getFillStyle(series.lines, series.color, 0, plotHeight); if (fillStyle) { ctx.fillStyle = fillStyle; plotLineArea(series.datapoints, series.xaxis, series.yaxis); } if (lw > 0) plotLine(series.datapoints, 0, 0, series.xaxis, series.yaxis); ctx.restore(); } function drawSeriesPoints(series) { function plotPoints(datapoints, radius, fillStyle, offset, shadow, axisx, axisy, symbol) { var points = datapoints.points, ps = datapoints.pointsize; for (var i = 0; i < points.length; i += ps) { var x = points[i], y = points[i + 1]; if (x == null || x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max) continue; ctx.beginPath(); x = axisx.p2c(x); y = axisy.p2c(y) + offset; if (symbol == "circle") ctx.arc(x, y, radius, 0, shadow ? Math.PI : Math.PI * 2, false); else symbol(ctx, x, y, radius, shadow); ctx.closePath(); if (fillStyle) { ctx.fillStyle = fillStyle; ctx.fill(); } ctx.stroke(); } } ctx.save(); ctx.translate(plotOffset.left, plotOffset.top); var lw = series.points.lineWidth, sw = series.shadowSize, radius = series.points.radius, symbol = series.points.symbol; // If the user sets the line width to 0, we change it to a very // small value. A line width of 0 seems to force the default of 1. 
// Doing the conditional here allows the shadow setting to still be // optional even with a lineWidth of 0. if( lw == 0 ) lw = 0.0001; if (lw > 0 && sw > 0) { // draw shadow in two steps var w = sw / 2; ctx.lineWidth = w; ctx.strokeStyle = "rgba(0,0,0,0.1)"; plotPoints(series.datapoints, radius, null, w + w/2, true, series.xaxis, series.yaxis, symbol); ctx.strokeStyle = "rgba(0,0,0,0.2)"; plotPoints(series.datapoints, radius, null, w/2, true, series.xaxis, series.yaxis, symbol); } ctx.lineWidth = lw; ctx.strokeStyle = series.color; plotPoints(series.datapoints, radius, getFillStyle(series.points, series.color), 0, false, series.xaxis, series.yaxis, symbol); ctx.restore(); } function drawBar(x, y, b, barLeft, barRight, offset, fillStyleCallback, axisx, axisy, c, horizontal, lineWidth) { var left, right, bottom, top, drawLeft, drawRight, drawTop, drawBottom, tmp; // in horizontal mode, we start the bar from the left // instead of from the bottom so it appears to be // horizontal rather than vertical if (horizontal) { drawBottom = drawRight = drawTop = true; drawLeft = false; left = b; right = x; top = y + barLeft; bottom = y + barRight; // account for negative bars if (right < left) { tmp = right; right = left; left = tmp; drawLeft = true; drawRight = false; } } else { drawLeft = drawRight = drawTop = true; drawBottom = false; left = x + barLeft; right = x + barRight; bottom = b; top = y; // account for negative bars if (top < bottom) { tmp = top; top = bottom; bottom = tmp; drawBottom = true; drawTop = false; } } // clip if (right < axisx.min || left > axisx.max || top < axisy.min || bottom > axisy.max) return; if (left < axisx.min) { left = axisx.min; drawLeft = false; } if (right > axisx.max) { right = axisx.max; drawRight = false; } if (bottom < axisy.min) { bottom = axisy.min; drawBottom = false; } if (top > axisy.max) { top = axisy.max; drawTop = false; } left = axisx.p2c(left); bottom = axisy.p2c(bottom); right = axisx.p2c(right); top = axisy.p2c(top); // fill 
the bar if (fillStyleCallback) { c.beginPath(); c.moveTo(left, bottom); c.lineTo(left, top); c.lineTo(right, top); c.lineTo(right, bottom); c.fillStyle = fillStyleCallback(bottom, top); c.fill(); } // draw outline if (lineWidth > 0 && (drawLeft || drawRight || drawTop || drawBottom)) { c.beginPath(); // FIXME: inline moveTo is buggy with excanvas c.moveTo(left, bottom + offset); if (drawLeft) c.lineTo(left, top + offset); else c.moveTo(left, top + offset); if (drawTop) c.lineTo(right, top + offset); else c.moveTo(right, top + offset); if (drawRight) c.lineTo(right, bottom + offset); else c.moveTo(right, bottom + offset); if (drawBottom) c.lineTo(left, bottom + offset); else c.moveTo(left, bottom + offset); c.stroke(); } } function drawSeriesBars(series) { function plotBars(datapoints, barLeft, barRight, offset, fillStyleCallback, axisx, axisy) { var points = datapoints.points, ps = datapoints.pointsize; for (var i = 0; i < points.length; i += ps) { if (points[i] == null) continue; drawBar(points[i], points[i + 1], points[i + 2], barLeft, barRight, offset, fillStyleCallback, axisx, axisy, ctx, series.bars.horizontal, series.bars.lineWidth); } } ctx.save(); ctx.translate(plotOffset.left, plotOffset.top); // FIXME: figure out a way to add shadows (for instance along the right edge) ctx.lineWidth = series.bars.lineWidth; ctx.strokeStyle = series.color; var barLeft; switch (series.bars.align) { case "left": barLeft = 0; break; case "right": barLeft = -series.bars.barWidth; break; case "center": barLeft = -series.bars.barWidth / 2; break; default: throw new Error("Invalid bar alignment: " + series.bars.align); } var fillStyleCallback = series.bars.fill ? 
function (bottom, top) { return getFillStyle(series.bars, series.color, bottom, top); } : null; plotBars(series.datapoints, barLeft, barLeft + series.bars.barWidth, 0, fillStyleCallback, series.xaxis, series.yaxis); ctx.restore(); } function getFillStyle(filloptions, seriesColor, bottom, top) { var fill = filloptions.fill; if (!fill) return null; if (filloptions.fillColor) return getColorOrGradient(filloptions.fillColor, bottom, top, seriesColor); var c = $.color.parse(seriesColor); c.a = typeof fill == "number" ? fill : 0.4; c.normalize(); return c.toString(); } function insertLegend() { placeholder.find(".legend").remove(); if (!options.legend.show) return; var fragments = [], entries = [], rowStarted = false, lf = options.legend.labelFormatter, s, label; // Build a list of legend entries, with each having a label and a color for (var i = 0; i < series.length; ++i) { s = series[i]; if (s.label) { label = lf ? lf(s.label, s) : s.label; if (label) { entries.push({ label: label, color: s.color }); } } } // Sort the legend using either the default or a custom comparator if (options.legend.sorted) { if ($.isFunction(options.legend.sorted)) { entries.sort(options.legend.sorted); } else if (options.legend.sorted == "reverse") { entries.reverse(); } else { var ascending = options.legend.sorted != "descending"; entries.sort(function(a, b) { return a.label == b.label ? 0 : ( (a.label < b.label) != ascending ? 1 : -1 // Logical XOR ); }); } } // Generate markup for the list of entries, in their final order for (var i = 0; i < entries.length; ++i) { var entry = entries[i]; if (i % options.legend.noColumns == 0) { if (rowStarted) fragments.push(''); fragments.push(''); rowStarted = true; } fragments.push( '
' + '' + entry.label + '' ); } if (rowStarted) fragments.push(''); if (fragments.length == 0) return; var table = '' + fragments.join("") + '
'; if (options.legend.container != null) $(options.legend.container).html(table); else { var pos = "", p = options.legend.position, m = options.legend.margin; if (m[0] == null) m = [m, m]; if (p.charAt(0) == "n") pos += 'top:' + (m[1] + plotOffset.top) + 'px;'; else if (p.charAt(0) == "s") pos += 'bottom:' + (m[1] + plotOffset.bottom) + 'px;'; if (p.charAt(1) == "e") pos += 'right:' + (m[0] + plotOffset.right) + 'px;'; else if (p.charAt(1) == "w") pos += 'left:' + (m[0] + plotOffset.left) + 'px;'; var legend = $('
' + table.replace('style="', 'style="position:absolute;' + pos +';') + '
').appendTo(placeholder); if (options.legend.backgroundOpacity != 0.0) { // put in the transparent background // separately to avoid blended labels and // label boxes var c = options.legend.backgroundColor; if (c == null) { c = options.grid.backgroundColor; if (c && typeof c == "string") c = $.color.parse(c); else c = $.color.extract(legend, 'background-color'); c.a = 1; c = c.toString(); } var div = legend.children(); $('
').prependTo(legend).css('opacity', options.legend.backgroundOpacity); } } } // interactive features var highlights = [], redrawTimeout = null; // returns the data item the mouse is over, or null if none is found function findNearbyItem(mouseX, mouseY, seriesFilter) { var maxDistance = options.grid.mouseActiveRadius, smallestDistance = maxDistance * maxDistance + 1, item = null, foundPoint = false, i, j, ps; for (i = series.length - 1; i >= 0; --i) { if (!seriesFilter(series[i])) continue; var s = series[i], axisx = s.xaxis, axisy = s.yaxis, points = s.datapoints.points, mx = axisx.c2p(mouseX), // precompute some stuff to make the loop faster my = axisy.c2p(mouseY), maxx = maxDistance / axisx.scale, maxy = maxDistance / axisy.scale; ps = s.datapoints.pointsize; // with inverse transforms, we can't use the maxx/maxy // optimization, sadly if (axisx.options.inverseTransform) maxx = Number.MAX_VALUE; if (axisy.options.inverseTransform) maxy = Number.MAX_VALUE; if (s.lines.show || s.points.show) { for (j = 0; j < points.length; j += ps) { var x = points[j], y = points[j + 1]; if (x == null) continue; // For points and lines, the cursor must be within a // certain distance to the data point if (x - mx > maxx || x - mx < -maxx || y - my > maxy || y - my < -maxy) continue; // We have to calculate distances in pixels, not in // data units, because the scales of the axes may be different var dx = Math.abs(axisx.p2c(x) - mouseX), dy = Math.abs(axisy.p2c(y) - mouseY), dist = dx * dx + dy * dy; // we save the sqrt // use <= to ensure last point takes precedence // (last generally means on top of) if (dist < smallestDistance) { smallestDistance = dist; item = [i, j / ps]; } } } if (s.bars.show && !item) { // no other point can be nearby var barLeft = s.bars.align == "left" ? 
0 : -s.bars.barWidth/2, barRight = barLeft + s.bars.barWidth; for (j = 0; j < points.length; j += ps) { var x = points[j], y = points[j + 1], b = points[j + 2]; if (x == null) continue; // for a bar graph, the cursor must be inside the bar if (series[i].bars.horizontal ? (mx <= Math.max(b, x) && mx >= Math.min(b, x) && my >= y + barLeft && my <= y + barRight) : (mx >= x + barLeft && mx <= x + barRight && my >= Math.min(b, y) && my <= Math.max(b, y))) item = [i, j / ps]; } } } if (item) { i = item[0]; j = item[1]; ps = series[i].datapoints.pointsize; return { datapoint: series[i].datapoints.points.slice(j * ps, (j + 1) * ps), dataIndex: j, series: series[i], seriesIndex: i }; } return null; } function onMouseMove(e) { if (options.grid.hoverable) triggerClickHoverEvent("plothover", e, function (s) { return s["hoverable"] != false; }); } function onMouseLeave(e) { if (options.grid.hoverable) triggerClickHoverEvent("plothover", e, function (s) { return false; }); } function onClick(e) { triggerClickHoverEvent("plotclick", e, function (s) { return s["clickable"] != false; }); } // trigger click or hover event (they send the same parameters // so we share their code) function triggerClickHoverEvent(eventname, event, seriesFilter) { var offset = eventHolder.offset(), canvasX = event.pageX - offset.left - plotOffset.left, canvasY = event.pageY - offset.top - plotOffset.top, pos = canvasToAxisCoords({ left: canvasX, top: canvasY }); pos.pageX = event.pageX; pos.pageY = event.pageY; var item = findNearbyItem(canvasX, canvasY, seriesFilter); if (item) { // fill in mouse pos for any listeners out there item.pageX = parseInt(item.series.xaxis.p2c(item.datapoint[0]) + offset.left + plotOffset.left, 10); item.pageY = parseInt(item.series.yaxis.p2c(item.datapoint[1]) + offset.top + plotOffset.top, 10); } if (options.grid.autoHighlight) { // clear auto-highlights for (var i = 0; i < highlights.length; ++i) { var h = highlights[i]; if (h.auto == eventname && !(item && h.series == 
item.series && h.point[0] == item.datapoint[0] && h.point[1] == item.datapoint[1])) unhighlight(h.series, h.point); } if (item) highlight(item.series, item.datapoint, eventname); } placeholder.trigger(eventname, [ pos, item ]); } function triggerRedrawOverlay() { var t = options.interaction.redrawOverlayInterval; if (t == -1) { // skip event queue drawOverlay(); return; } if (!redrawTimeout) redrawTimeout = setTimeout(drawOverlay, t); } function drawOverlay() { redrawTimeout = null; // draw highlights octx.save(); overlay.clear(); octx.translate(plotOffset.left, plotOffset.top); var i, hi; for (i = 0; i < highlights.length; ++i) { hi = highlights[i]; if (hi.series.bars.show) drawBarHighlight(hi.series, hi.point); else drawPointHighlight(hi.series, hi.point); } octx.restore(); executeHooks(hooks.drawOverlay, [octx]); } function highlight(s, point, auto) { if (typeof s == "number") s = series[s]; if (typeof point == "number") { var ps = s.datapoints.pointsize; point = s.datapoints.points.slice(ps * point, ps * (point + 1)); } var i = indexOfHighlight(s, point); if (i == -1) { highlights.push({ series: s, point: point, auto: auto }); triggerRedrawOverlay(); } else if (!auto) highlights[i].auto = false; } function unhighlight(s, point) { if (s == null && point == null) { highlights = []; triggerRedrawOverlay(); return; } if (typeof s == "number") s = series[s]; if (typeof point == "number") { var ps = s.datapoints.pointsize; point = s.datapoints.points.slice(ps * point, ps * (point + 1)); } var i = indexOfHighlight(s, point); if (i != -1) { highlights.splice(i, 1); triggerRedrawOverlay(); } } function indexOfHighlight(s, p) { for (var i = 0; i < highlights.length; ++i) { var h = highlights[i]; if (h.series == s && h.point[0] == p[0] && h.point[1] == p[1]) return i; } return -1; } function drawPointHighlight(series, point) { var x = point[0], y = point[1], axisx = series.xaxis, axisy = series.yaxis, highlightColor = (typeof series.highlightColor === "string") ? 
series.highlightColor : $.color.parse(series.color).scale('a', 0.5).toString(); if (x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max) return; var pointRadius = series.points.radius + series.points.lineWidth / 2; octx.lineWidth = pointRadius; octx.strokeStyle = highlightColor; var radius = 1.5 * pointRadius; x = axisx.p2c(x); y = axisy.p2c(y); octx.beginPath(); if (series.points.symbol == "circle") octx.arc(x, y, radius, 0, 2 * Math.PI, false); else series.points.symbol(octx, x, y, radius, false); octx.closePath(); octx.stroke(); } function drawBarHighlight(series, point) { var highlightColor = (typeof series.highlightColor === "string") ? series.highlightColor : $.color.parse(series.color).scale('a', 0.5).toString(), fillStyle = highlightColor, barLeft = series.bars.align == "left" ? 0 : -series.bars.barWidth/2; octx.lineWidth = series.bars.lineWidth; octx.strokeStyle = highlightColor; drawBar(point[0], point[1], point[2] || 0, barLeft, barLeft + series.bars.barWidth, 0, function () { return fillStyle; }, series.xaxis, series.yaxis, octx, series.bars.horizontal, series.bars.lineWidth); } function getColorOrGradient(spec, bottom, top, defaultColor) { if (typeof spec == "string") return spec; else { // assume this is a gradient spec; IE currently only // supports a simple vertical gradient properly, so that's // what we support too var gradient = ctx.createLinearGradient(0, top, 0, bottom); for (var i = 0, l = spec.colors.length; i < l; ++i) { var c = spec.colors[i]; if (typeof c != "string") { var co = $.color.parse(defaultColor); if (c.brightness != null) co = co.scale('rgb', c.brightness); if (c.opacity != null) co.a *= c.opacity; c = co.toString(); } gradient.addColorStop(i / (l - 1), c); } return gradient; } } } // Add the plot function to the top level of the jQuery object $.plot = function(placeholder, data, options) { //var t0 = new Date(); var plot = new Plot($(placeholder), data, options, $.plot.plugins); //(window.console ? 
console.log : alert)("time used (msecs): " + ((new Date()).getTime() - t0.getTime())); return plot; }; $.plot.version = "0.8.0"; $.plot.plugins = []; // Also add the plot function as a chainable property $.fn.plot = function(data, options) { return this.each(function() { $.plot(this, data, options); }); } // round to nearby lower multiple of base function floorInBase(n, base) { return base * Math.floor(n / base); } })(jQuery); ================================================ FILE: vendor/assets/javascripts/jquery.tablesorter.js ================================================ /**! * TableSorter (FORK) 2.18.3 - Client-side table sorting with ease! * @requires jQuery v1.2.6+ * * Copyright (c) 2007 Christian Bach * Examples and docs at: http://tablesorter.com * Dual licensed under the MIT and GPL licenses: * http://www.opensource.org/licenses/mit-license.php * http://www.gnu.org/licenses/gpl.html * * @type jQuery * @name tablesorter (FORK) * @cat Plugins/Tablesorter * @author Christian Bach/christian.bach@polyester.se * @contributor Rob Garrison/https://github.com/Mottie/tablesorter */ /*jshint browser:true, jquery:true, unused:false, expr: true */ /*global console:false, alert:false */ !(function($) { "use strict"; $.extend({ /*jshint supernew:true */ tablesorter: new function() { var ts = this; ts.version = "2.18.3"; ts.parsers = []; ts.widgets = []; ts.defaults = { // *** appearance theme : 'default', // adds tablesorter-{theme} to the table for styling widthFixed : false, // adds colgroup to fix widths of columns showProcessing : false, // show an indeterminate timer icon in the header when the table is sorted or filtered. 
headerTemplate : '{content}',// header layout template (HTML ok); {content} = innerHTML, {icon} = (class from cssIcon) onRenderTemplate : null, // function(index, template){ return template; }, (template is a string) onRenderHeader : null, // function(index){}, (nothing to return) // *** functionality cancelSelection : true, // prevent text selection in the header tabIndex : true, // add tabindex to header for keyboard accessibility dateFormat : 'mmddyyyy', // other options: "ddmmyyy" or "yyyymmdd" sortMultiSortKey : 'shiftKey', // key used to select additional columns sortResetKey : 'ctrlKey', // key used to remove sorting on a column usNumberFormat : true, // false for German "1.234.567,89" or French "1 234 567,89" delayInit : false, // if false, the parsed table contents will not update until the first sort serverSideSorting: false, // if true, server-side sorting should be performed because client-side sorting will be disabled, but the ui and events will still be used. // *** sort options headers : {}, // set sorter, string, empty, locked order, sortInitialOrder, filter, etc. 
ignoreCase : true, // ignore case while sorting sortForce : null, // column(s) first sorted; always applied sortList : [], // Initial sort order; applied initially; updated when manually sorted sortAppend : null, // column(s) sorted last; always applied sortStable : false, // when sorting two rows with exactly the same content, the original sort order is maintained sortInitialOrder : 'asc', // sort direction on first click sortLocaleCompare: false, // replace equivalent character (accented characters) sortReset : false, // third click on the header will reset column to default - unsorted sortRestart : false, // restart sort to "sortInitialOrder" when clicking on previously unsorted columns emptyTo : 'bottom', // sort empty cell to bottom, top, none, zero stringTo : 'max', // sort strings in numerical column as max, min, top, bottom, zero textExtraction : 'basic', // text extraction method/function - function(node, table, cellIndex){} textAttribute : 'data-text',// data-attribute that contains alternate cell text (used in textExtraction function) textSorter : null, // choose overall or specific column sorter function(a, b, direction, table, columnIndex) [alt: ts.sortText] numberSorter : null, // choose overall numeric sorter function(a, b, direction, maxColumnValue) // *** widget options widgets: [], // method to add widgets, e.g. 
widgets: ['zebra'] widgetOptions : { zebra : [ 'even', 'odd' ] // zebra widget alternating row class names }, initWidgets : true, // apply widgets on tablesorter initialization widgetClass : 'widget-{name}', // table class name template to match to include a widget // *** callbacks initialized : null, // function(table){}, // *** extra css class names tableClass : '', cssAsc : '', cssDesc : '', cssNone : '', cssHeader : '', cssHeaderRow : '', cssProcessing : '', // processing icon applied to header during sort/filter cssChildRow : 'tablesorter-childRow', // class name indiciating that a row is to be attached to the its parent cssIcon : 'tablesorter-icon', // if this class exists, a will be added to the header automatically cssIconNone : '', // class name added to the icon when there is no column sort cssIconAsc : '', // class name added to the icon when the column has an ascending sort cssIconDesc : '', // class name added to the icon when the column has a descending sort cssInfoBlock : 'tablesorter-infoOnly', // don't sort tbody with this class name (only one class name allowed here!) 
cssAllowClicks : 'tablesorter-allowClicks', // class name added to table header which allows clicks to bubble up // *** selectors selectorHeaders : '> thead th, > thead td', selectorSort : 'th, td', // jQuery selector of content within selectorHeaders that is clickable to trigger a sort selectorRemove : '.remove-me', // *** advanced debug : false, // *** Internal variables headerList: [], empties: {}, strings: {}, parsers: [] // deprecated; but retained for backwards compatibility // widgetZebra: { css: ["even", "odd"] } }; // internal css classes - these will ALWAYS be added to // the table and MUST only contain one class name - fixes #381 ts.css = { table : 'tablesorter', cssHasChild: 'tablesorter-hasChildRow', childRow : 'tablesorter-childRow', header : 'tablesorter-header', headerRow : 'tablesorter-headerRow', headerIn : 'tablesorter-header-inner', icon : 'tablesorter-icon', info : 'tablesorter-infoOnly', processing : 'tablesorter-processing', sortAsc : 'tablesorter-headerAsc', sortDesc : 'tablesorter-headerDesc', sortNone : 'tablesorter-headerUnSorted' }; // labels applied to sortable headers for accessibility (aria) support ts.language = { sortAsc : 'Ascending sort applied, ', sortDesc : 'Descending sort applied, ', sortNone : 'No sort applied, ', nextAsc : 'activate to apply an ascending sort', nextDesc : 'activate to apply a descending sort', nextNone : 'activate to remove the sort' }; /* debuging utils */ function log() { var a = arguments[0], s = arguments.length > 1 ? Array.prototype.slice.call(arguments) : a; if (typeof console !== "undefined" && typeof console.log !== "undefined") { console[ /error/i.test(a) ? 'error' : /warn/i.test(a) ? 
'warn' : 'log' ](s); } else { alert(s); } } function benchmark(s, d) { log(s + " (" + (new Date().getTime() - d.getTime()) + "ms)"); } ts.log = log; ts.benchmark = benchmark; // $.isEmptyObject from jQuery v1.4 function isEmptyObject(obj) { /*jshint forin: false */ for (var name in obj) { return false; } return true; } function getElementText(table, node, cellIndex) { if (!node) { return ""; } var te, c = table.config, t = c.textExtraction || '', text = ""; if (t === "basic") { // check data-attribute first text = $(node).attr(c.textAttribute) || node.textContent || node.innerText || $(node).text() || ""; } else { if (typeof(t) === "function") { text = t(node, table, cellIndex); } else if (typeof (te = ts.getColumnData( table, t, cellIndex )) === 'function') { text = te(node, table, cellIndex); } else { // previous "simple" method text = node.textContent || node.innerText || $(node).text() || ""; } } return $.trim(text); } function detectParserForColumn(table, rows, rowIndex, cellIndex) { var cur, $node, i = ts.parsers.length, node = false, nodeValue = '', keepLooking = true; while (nodeValue === '' && keepLooking) { rowIndex++; if (rows[rowIndex]) { node = rows[rowIndex].cells[cellIndex]; nodeValue = getElementText(table, node, cellIndex); $node = $(node); if (table.config.debug) { log('Checking if value was empty on row ' + rowIndex + ', column: ' + cellIndex + ': "' + nodeValue + '"'); } } else { keepLooking = false; } } while (--i >= 0) { cur = ts.parsers[i]; // ignore the default text parser because it will always be true if (cur && cur.id !== 'text' && cur.is && cur.is(nodeValue, table, node, $node)) { return cur; } } // nothing found, return the generic parser (text) return ts.getParserById('text'); } function buildParserCache(table) { var c = table.config, // update table bodies in case we start with an empty table tb = c.$tbodies = c.$table.children('tbody:not(.' 
+ c.cssInfoBlock + ')'), rows, list, l, i, h, ch, np, p, e, time, j = 0, parsersDebug = "", len = tb.length; if ( len === 0) { return c.debug ? log('Warning: *Empty table!* Not building a parser cache') : ''; } else if (c.debug) { time = new Date(); log('Detecting parsers for each column'); } list = { extractors: [], parsers: [] }; while (j < len) { rows = tb[j].rows; if (rows[j]) { l = c.columns; // rows[j].cells.length; for (i = 0; i < l; i++) { h = c.$headers.filter('[data-column="' + i + '"]:last'); // get column indexed table cell ch = ts.getColumnData( table, c.headers, i ); // get column parser/extractor e = ts.getParserById( ts.getData(h, ch, 'extractor') ); p = ts.getParserById( ts.getData(h, ch, 'sorter') ); np = ts.getData(h, ch, 'parser') === 'false'; // empty cells behaviour - keeping emptyToBottom for backwards compatibility c.empties[i] = ( ts.getData(h, ch, 'empty') || c.emptyTo || (c.emptyToBottom ? 'bottom' : 'top' ) ).toLowerCase(); // text strings behaviour in numerical sorts c.strings[i] = ( ts.getData(h, ch, 'string') || c.stringTo || 'max' ).toLowerCase(); if (np) { p = ts.getParserById('no-parser'); } if (!e) { // For now, maybe detect someday e = false; } if (!p) { p = detectParserForColumn(table, rows, -1, i); } if (c.debug) { parsersDebug += "column:" + i + "; extractor:" + e.id + "; parser:" + p.id + "; string:" + c.strings[i] + '; empty: ' + c.empties[i] + "\n"; } list.parsers[i] = p; list.extractors[i] = e; } } j += (list.parsers.length) ? len : 1; } if (c.debug) { log(parsersDebug ? parsersDebug : "No parsers detected"); benchmark("Completed detecting parsers", time); } c.parsers = list.parsers; c.extractors = list.extractors; } /* utils */ function buildCache(table) { var cc, t, tx, v, i, j, k, $row, rows, cols, cacheTime, totalRows, rowData, colMax, c = table.config, $tb = c.$table.children('tbody'), extractors = c.extractors, parsers = c.parsers; c.cache = {}; c.totalRows = 0; // if no parsers found, return - it's an empty table. 
if (!parsers) { return c.debug ? log('Warning: *Empty table!* Not building a cache') : ''; } if (c.debug) { cacheTime = new Date(); } // processing icon if (c.showProcessing) { ts.isProcessing(table, true); } for (k = 0; k < $tb.length; k++) { colMax = []; // column max value per tbody cc = c.cache[k] = { normalized: [] // array of normalized row data; last entry contains "rowData" above // colMax: # // added at the end }; // ignore tbodies with class name from c.cssInfoBlock if (!$tb.eq(k).hasClass(c.cssInfoBlock)) { totalRows = ($tb[k] && $tb[k].rows.length) || 0; for (i = 0; i < totalRows; ++i) { rowData = { // order: original row order # // $row : jQuery Object[] child: [] // child row text (filter widget) }; /** Add the table data to main data array */ $row = $($tb[k].rows[i]); rows = [ new Array(c.columns) ]; cols = []; // if this is a child row, add it to the last row's children and continue to the next row // ignore child row class, if it is the first row if ($row.hasClass(c.cssChildRow) && i !== 0) { t = cc.normalized.length - 1; cc.normalized[t][c.columns].$row = cc.normalized[t][c.columns].$row.add($row); // add "hasChild" class name to parent row if (!$row.prev().hasClass(c.cssChildRow)) { $row.prev().addClass(ts.css.cssHasChild); } // save child row content (un-parsed!) 
rowData.child[t] = $.trim( $row[0].textContent || $row[0].innerText || $row.text() || "" ); // go to the next for loop continue; } rowData.$row = $row; rowData.order = i; // add original row position to rowCache for (j = 0; j < c.columns; ++j) { if (typeof parsers[j] === 'undefined') { if (c.debug) { log('No parser found for cell:', $row[0].cells[j], 'does it have a header?'); } continue; } t = getElementText(table, $row[0].cells[j], j); // do extract before parsing if there is one if (typeof extractors[j].id === 'undefined') { tx = t; } else { tx = extractors[j].format(t, table, $row[0].cells[j], j); } // allow parsing if the string is empty, previously parsing would change it to zero, // in case the parser needs to extract data from the table cell attributes v = parsers[j].id === 'no-parser' ? '' : parsers[j].format(tx, table, $row[0].cells[j], j); cols.push( c.ignoreCase && typeof v === 'string' ? v.toLowerCase() : v ); if ((parsers[j].type || '').toLowerCase() === "numeric") { // determine column max value (ignore sign) colMax[j] = Math.max(Math.abs(v) || 0, colMax[j] || 0); } } // ensure rowData is always in the same location (after the last column) cols[c.columns] = rowData; cc.normalized.push(cols); } cc.colMax = colMax; // total up rows, not including child rows c.totalRows += cc.normalized.length; } } if (c.showProcessing) { ts.isProcessing(table); // remove processing icon } if (c.debug) { benchmark("Building cache for " + totalRows + " rows", cacheTime); } } // init flag (true) used by pager plugin to prevent widget application function appendToTable(table, init) { var c = table.config, wo = c.widgetOptions, b = table.tBodies, rows = [], cc = c.cache, n, totalRows, $bk, $tb, i, k, appendTime; // empty table - fixes #206/#346 if (isEmptyObject(cc)) { // run pager appender in case the table was just emptied return c.appender ? c.appender(table, rows) : table.isUpdating ? 
c.$table.trigger("updateComplete", table) : ''; // Fixes #532 } if (c.debug) { appendTime = new Date(); } for (k = 0; k < b.length; k++) { $bk = $(b[k]); if ($bk.length && !$bk.hasClass(c.cssInfoBlock)) { // get tbody $tb = ts.processTbody(table, $bk, true); n = cc[k].normalized; totalRows = n.length; for (i = 0; i < totalRows; i++) { rows.push(n[i][c.columns].$row); // removeRows used by the pager plugin; don't render if using ajax - fixes #411 if (!c.appender || (c.pager && (!c.pager.removeRows || !wo.pager_removeRows) && !c.pager.ajax)) { $tb.append(n[i][c.columns].$row); } } // restore tbody ts.processTbody(table, $tb, false); } } if (c.appender) { c.appender(table, rows); } if (c.debug) { benchmark("Rebuilt table", appendTime); } // apply table widgets; but not before ajax completes if (!init && !c.appender) { ts.applyWidget(table); } if (table.isUpdating) { c.$table.trigger("updateComplete", table); } } function formatSortingOrder(v) { // look for "d" in "desc" order; return true return (/^d/i.test(v) || v === 1); } function buildHeaders(table) { var ch, $t, h, i, t, lock, time, c = table.config; c.headerList = []; c.headerContent = []; if (c.debug) { time = new Date(); } // children tr in tfoot - see issue #196 & #547 c.columns = ts.computeColumnIndex( c.$table.children('thead, tfoot').children('tr') ); // add icon if cssIcon option exists i = c.cssIcon ? 
'' : ''; // redefine c.$headers here in case of an updateAll that replaces or adds an entire header cell - see #683 c.$headers = $(table).find(c.selectorHeaders).each(function(index) { $t = $(this); // make sure to get header cell & not column indexed cell ch = ts.getColumnData( table, c.headers, index, true ); // save original header content c.headerContent[index] = $(this).html(); // if headerTemplate is empty, don't reformat the header cell if ( c.headerTemplate !== '' ) { // set up header template t = c.headerTemplate.replace(/\{content\}/g, $(this).html()).replace(/\{icon\}/g, i); if (c.onRenderTemplate) { h = c.onRenderTemplate.apply($t, [index, t]); if (h && typeof h === 'string') { t = h; } // only change t if something is returned } $(this).html('
' + t + '
'); // faster than wrapInner } if (c.onRenderHeader) { c.onRenderHeader.apply($t, [index, c, c.$table]); } // *** remove this.column value if no conflicts found this.column = parseInt( $(this).attr('data-column'), 10); this.order = formatSortingOrder( ts.getData($t, ch, 'sortInitialOrder') || c.sortInitialOrder ) ? [1,0,2] : [0,1,2]; this.count = -1; // set to -1 because clicking on the header automatically adds one this.lockedOrder = false; lock = ts.getData($t, ch, 'lockedOrder') || false; if (typeof lock !== 'undefined' && lock !== false) { this.order = this.lockedOrder = formatSortingOrder(lock) ? [1,1,1] : [0,0,0]; } $t.addClass(ts.css.header + ' ' + c.cssHeader); // add cell to headerList c.headerList[index] = this; // add to parent in case there are multiple rows $t.parent().addClass(ts.css.headerRow + ' ' + c.cssHeaderRow).attr('role', 'row'); // allow keyboard cursor to focus on element if (c.tabIndex) { $t.attr("tabindex", 0); } }).attr({ scope: 'col', role : 'columnheader' }); // enable/disable sorting updateHeader(table); if (c.debug) { benchmark("Built headers:", time); log(c.$headers); } } function commonUpdate(table, resort, callback) { var c = table.config; // remove rows/elements before update c.$table.find(c.selectorRemove).remove(); // rebuild parsers buildParserCache(table); // rebuild the cache map buildCache(table); checkResort(c.$table, resort, callback); } function updateHeader(table) { var s, $th, col, c = table.config; c.$headers.each(function(index, th){ $th = $(th); col = ts.getColumnData( table, c.headers, index, true ); // add "sorter-false" class if "parser-false" is set s = ts.getData( th, col, 'sorter' ) === 'false' || ts.getData( th, col, 'parser' ) === 'false'; th.sortDisabled = s; $th[ s ? 
'addClass' : 'removeClass' ]('sorter-false').attr('aria-disabled', '' + s); // aria-controls - requires table ID if (table.id) { if (s) { $th.removeAttr('aria-controls'); } else { $th.attr('aria-controls', table.id); } } }); } function setHeadersCss(table) { var f, i, j, c = table.config, list = c.sortList, len = list.length, none = ts.css.sortNone + ' ' + c.cssNone, css = [ts.css.sortAsc + ' ' + c.cssAsc, ts.css.sortDesc + ' ' + c.cssDesc], cssIcon = [ c.cssIconAsc, c.cssIconDesc, c.cssIconNone ], aria = ['ascending', 'descending'], // find the footer $t = $(table).find('tfoot tr').children().add(c.$extraHeaders).removeClass(css.join(' ')); // remove all header information c.$headers .removeClass(css.join(' ')) .addClass(none).attr('aria-sort', 'none') .find('.' + c.cssIcon) .removeClass(cssIcon.join(' ')) .addClass(cssIcon[2]); for (i = 0; i < len; i++) { // direction = 2 means reset! if (list[i][1] !== 2) { // multicolumn sorting updating - choose the :last in case there are nested columns f = c.$headers.not('.sorter-false').filter('[data-column="' + list[i][0] + '"]' + (len === 1 ? ':last' : '') ); if (f.length) { for (j = 0; j < f.length; j++) { if (!f[j].sortDisabled) { f.eq(j) .removeClass(none) .addClass(css[list[i][1]]) .attr('aria-sort', aria[list[i][1]]) .find('.' + c.cssIcon) .removeClass(cssIcon[2]) .addClass(cssIcon[list[i][1]]); } } // add sorted class to footer & extra headers, if they exist if ($t.length) { $t.filter('[data-column="' + list[i][0] + '"]').removeClass(none).addClass(css[list[i][1]]); } } } } // add verbose aria labels c.$headers.not('.sorter-false').each(function(){ var $this = $(this), nextSort = this.order[(this.count + 1) % (c.sortReset ? 3 : 2)], txt = $this.text() + ': ' + ts.language[ $this.hasClass(ts.css.sortAsc) ? 'sortAsc' : $this.hasClass(ts.css.sortDesc) ? 'sortDesc' : 'sortNone' ] + ts.language[ nextSort === 0 ? 'nextAsc' : nextSort === 1 ? 
'nextDesc' : 'nextNone' ]; $this.attr('aria-label', txt ); }); } // automatically add col group, and column sizes if set function fixColumnWidth(table) { var colgroup, overallWidth, c = table.config; if (c.widthFixed && c.$table.children('colgroup').length === 0) { colgroup = $(''); overallWidth = $(table).width(); // only add col for visible columns - fixes #371 $(table.tBodies).not('.' + c.cssInfoBlock).find("tr:first").children(":visible").each(function() { colgroup.append($('').css('width', parseInt(($(this).width()/overallWidth)*1000, 10)/10 + '%')); }); c.$table.prepend(colgroup); } } function updateHeaderSortCount(table, list) { var s, t, o, col, primary, c = table.config, sl = list || c.sortList; c.sortList = []; $.each(sl, function(i,v){ // ensure all sortList values are numeric - fixes #127 col = parseInt(v[0], 10); // make sure header exists o = c.$headers.filter('[data-column="' + col + '"]:last')[0]; if (o) { // prevents error if sorton array is wrong // o.count = o.count + 1; t = ('' + v[1]).match(/^(1|d|s|o|n)/); t = t ? t[0] : ''; // 0/(a)sc (default), 1/(d)esc, (s)ame, (o)pposite, (n)ext switch(t) { case '1': case 'd': // descending t = 1; break; case 's': // same direction (as primary column) // if primary sort is set to "s", make it ascending t = primary || 0; break; case 'o': s = o.order[(primary || 0) % (c.sortReset ? 3 : 2)]; // opposite of primary column; but resets if primary resets t = s === 0 ? 1 : s === 1 ? 0 : 2; break; case 'n': o.count = o.count + 1; t = o.order[(o.count) % (c.sortReset ? 3 : 2)]; break; default: // ascending t = 0; break; } primary = i === 0 ? t : primary; s = [ col, parseInt(t, 10) || 0 ]; c.sortList.push(s); t = $.inArray(s[1], o.order); // fixes issue #167 o.count = t >= 0 ? t : s[1] % (c.sortReset ? 3 : 2); } }); } function getCachedSortType(parsers, i) { return (parsers && parsers[i]) ? 
parsers[i].type || '' : ''; } function initSort(table, cell, event){ if (table.isUpdating) { // let any updates complete before initializing a sort return setTimeout(function(){ initSort(table, cell, event); }, 50); } var arry, indx, col, order, s, c = table.config, key = !event[c.sortMultiSortKey], $table = c.$table; // Only call sortStart if sorting is enabled $table.trigger("sortStart", table); // get current column sort order cell.count = event[c.sortResetKey] ? 2 : (cell.count + 1) % (c.sortReset ? 3 : 2); // reset all sorts on non-current column - issue #30 if (c.sortRestart) { indx = cell; c.$headers.each(function() { // only reset counts on columns that weren't just clicked on and if not included in a multisort if (this !== indx && (key || !$(this).is('.' + ts.css.sortDesc + ',.' + ts.css.sortAsc))) { this.count = -1; } }); } // get current column index indx = parseInt( $(cell).attr('data-column'), 10 ); // user only wants to sort on one column if (key) { // flush the sort list c.sortList = []; if (c.sortForce !== null) { arry = c.sortForce; for (col = 0; col < arry.length; col++) { if (arry[col][0] !== indx) { c.sortList.push(arry[col]); } } } // add column to sort list order = cell.order[cell.count]; if (order < 2) { c.sortList.push([indx, order]); // add other columns if header spans across multiple if (cell.colSpan > 1) { for (col = 1; col < cell.colSpan; col++) { c.sortList.push([indx + col, order]); } } } // multi column sorting } else { // get rid of the sortAppend before adding more - fixes issue #115 & #523 if (c.sortAppend && c.sortList.length > 1) { for (col = 0; col < c.sortAppend.length; col++) { s = ts.isValueInArray(c.sortAppend[col][0], c.sortList); if (s >= 0) { c.sortList.splice(s,1); } } } // the user has clicked on an already sorted column if (ts.isValueInArray(indx, c.sortList) >= 0) { // reverse the sorting direction for (col = 0; col < c.sortList.length; col++) { s = c.sortList[col]; order = c.$headers.filter('[data-column="' + s[0] + 
'"]:last')[0]; if (s[0] === indx) { // order.count seems to be incorrect when compared to cell.count s[1] = order.order[cell.count]; if (s[1] === 2) { c.sortList.splice(col,1); order.count = -1; } } } } else { // add column to sort list array order = cell.order[cell.count]; if (order < 2) { c.sortList.push([indx, order]); // add other columns if header spans across multiple if (cell.colSpan > 1) { for (col = 1; col < cell.colSpan; col++) { c.sortList.push([indx + col, order]); } } } } } if (c.sortAppend !== null) { arry = c.sortAppend; for (col = 0; col < arry.length; col++) { if (arry[col][0] !== indx) { c.sortList.push(arry[col]); } } } // sortBegin event triggered immediately before the sort $table.trigger("sortBegin", table); // setTimeout needed so the processing icon shows up setTimeout(function(){ // set css for headers setHeadersCss(table); multisort(table); appendToTable(table); $table.trigger("sortEnd", table); }, 1); } // sort multiple columns function multisort(table) { /*jshint loopfunc:true */ var i, k, num, col, sortTime, colMax, cache, order, sort, x, y, dir = 0, c = table.config, cts = c.textSorter || '', sortList = c.sortList, l = sortList.length, bl = table.tBodies.length; if (c.serverSideSorting || isEmptyObject(c.cache)) { // empty table - fixes #206/#346 return; } if (c.debug) { sortTime = new Date(); } for (k = 0; k < bl; k++) { colMax = c.cache[k].colMax; cache = c.cache[k].normalized; cache.sort(function(a, b) { // cache is undefined here in IE, so don't use it! for (i = 0; i < l; i++) { col = sortList[i][0]; order = sortList[i][1]; // sort direction, true = asc, false = desc dir = order === 0; if (c.sortStable && a[col] === b[col] && l === 1) { return a[c.columns].order - b[c.columns].order; } // fallback to natural sort since it is more robust num = /n/i.test(getCachedSortType(c.parsers, col)); if (num && c.strings[col]) { // sort strings in numerical columns if (typeof (c.string[c.strings[col]]) === 'boolean') { num = (dir ? 
1 : -1) * (c.string[c.strings[col]] ? -1 : 1); } else { num = (c.strings[col]) ? c.string[c.strings[col]] || 0 : 0; } // fall back to built-in numeric sort // var sort = $.tablesorter["sort" + s](table, a[c], b[c], c, colMax[c], dir); sort = c.numberSorter ? c.numberSorter(a[col], b[col], dir, colMax[col], table) : ts[ 'sortNumeric' + (dir ? 'Asc' : 'Desc') ](a[col], b[col], num, colMax[col], col, table); } else { // set a & b depending on sort direction x = dir ? a : b; y = dir ? b : a; // text sort function if (typeof(cts) === 'function') { // custom OVERALL text sorter sort = cts(x[col], y[col], dir, col, table); } else if (typeof(cts) === 'object' && cts.hasOwnProperty(col)) { // custom text sorter for a SPECIFIC COLUMN sort = cts[col](x[col], y[col], dir, col, table); } else { // fall back to natural sort sort = ts[ 'sortNatural' + (dir ? 'Asc' : 'Desc') ](a[col], b[col], col, table, c); } } if (sort) { return sort; } } return a[c.columns].order - b[c.columns].order; }); } if (c.debug) { benchmark("Sorting on " + sortList.toString() + " and dir " + order + " time", sortTime); } } function resortComplete($table, callback){ var table = $table[0]; if (table.isUpdating) { $table.trigger('updateComplete', table); } if ($.isFunction(callback)) { callback($table[0]); } } function checkResort($table, flag, callback) { var sl = $table[0].config.sortList; // don't try to resort if the table is still processing // this will catch spamming of the updateCell method if (flag !== false && !$table[0].isProcessing && sl.length) { $table.trigger("sorton", [sl, function(){ resortComplete($table, callback); }, true]); } else { resortComplete($table, callback); ts.applyWidget($table[0], false); } } function bindMethods(table){ var c = table.config, $table = c.$table; // apply easy methods that trigger bound events $table .unbind('sortReset update updateRows updateCell updateAll addRows updateComplete sorton appendCache updateCache applyWidgetId applyWidgets refreshWidgets destroy 
mouseup mouseleave '.split(' ').join(c.namespace + ' ')) .bind("sortReset" + c.namespace, function(e, callback){ e.stopPropagation(); c.sortList = []; setHeadersCss(table); multisort(table); appendToTable(table); if ($.isFunction(callback)) { callback(table); } }) .bind("updateAll" + c.namespace, function(e, resort, callback){ e.stopPropagation(); table.isUpdating = true; ts.refreshWidgets(table, true, true); ts.restoreHeaders(table); buildHeaders(table); ts.bindEvents(table, c.$headers, true); bindMethods(table); commonUpdate(table, resort, callback); }) .bind("update" + c.namespace + " updateRows" + c.namespace, function(e, resort, callback) { e.stopPropagation(); table.isUpdating = true; // update sorting (if enabled/disabled) updateHeader(table); commonUpdate(table, resort, callback); }) .bind("updateCell" + c.namespace, function(e, cell, resort, callback) { e.stopPropagation(); table.isUpdating = true; $table.find(c.selectorRemove).remove(); // get position from the dom var v, t, row, icell, $tb = $table.find('tbody'), $cell = $(cell), // update cache - format: function(s, table, cell, cellIndex) // no closest in jQuery v1.2.6 - tbdy = $tb.index( $(cell).closest('tbody') ),$row = $(cell).closest('tr'); tbdy = $tb.index( $.fn.closest ? $cell.closest('tbody') : $cell.parents('tbody').filter(':first') ), $row = $.fn.closest ? $cell.closest('tr') : $cell.parents('tr').filter(':first'); cell = $cell[0]; // in case cell is a jQuery object // tbody may not exist if update is initialized while tbody is removed for processing if ($tb.length && tbdy >= 0) { row = $tb.eq(tbdy).find('tr').index( $row ); icell = $cell.index(); c.cache[tbdy].normalized[row][c.columns].$row = $row; if (typeof c.extractors[icell].id === 'undefined') { t = getElementText(table, cell, icell); } else { t = c.extractors[icell].format( getElementText(table, cell, icell), table, cell, icell ); } v = c.parsers[icell].id === 'no-parser' ? 
'' : c.parsers[icell].format( t, table, cell, icell ); c.cache[tbdy].normalized[row][icell] = c.ignoreCase && typeof v === 'string' ? v.toLowerCase() : v; if ((c.parsers[icell].type || '').toLowerCase() === "numeric") { // update column max value (ignore sign) c.cache[tbdy].colMax[icell] = Math.max(Math.abs(v) || 0, c.cache[tbdy].colMax[icell] || 0); } checkResort($table, resort, callback); } }) .bind("addRows" + c.namespace, function(e, $row, resort, callback) { e.stopPropagation(); table.isUpdating = true; if (isEmptyObject(c.cache)) { // empty table, do an update instead - fixes #450 updateHeader(table); commonUpdate(table, resort, callback); } else { $row = $($row).attr('role', 'row'); // make sure we're using a jQuery object var i, j, l, t, v, rowData, cells, rows = $row.filter('tr').length, tbdy = $table.find('tbody').index( $row.parents('tbody').filter(':first') ); // fixes adding rows to an empty table - see issue #179 if (!(c.parsers && c.parsers.length)) { buildParserCache(table); } // add each row for (i = 0; i < rows; i++) { l = $row[i].cells.length; cells = []; rowData = { child: [], $row : $row.eq(i), order: c.cache[tbdy].normalized.length }; // add each cell for (j = 0; j < l; j++) { if (typeof c.extractors[j].id === 'undefined') { t = getElementText(table, $row[i].cells[j], j); } else { t = c.extractors[j].format( getElementText(table, $row[i].cells[j], j), table, $row[i].cells[j], j ); } v = c.parsers[j].id === 'no-parser' ? '' : c.parsers[j].format( t, table, $row[i].cells[j], j ); cells[j] = c.ignoreCase && typeof v === 'string' ? 
v.toLowerCase() : v; if ((c.parsers[j].type || '').toLowerCase() === "numeric") { // update column max value (ignore sign) c.cache[tbdy].colMax[j] = Math.max(Math.abs(cells[j]) || 0, c.cache[tbdy].colMax[j] || 0); } } // add the row data to the end cells.push(rowData); // update cache c.cache[tbdy].normalized.push(cells); } // resort using current settings checkResort($table, resort, callback); } }) .bind("updateComplete" + c.namespace, function(){ table.isUpdating = false; }) .bind("sorton" + c.namespace, function(e, list, callback, init) { var c = table.config; e.stopPropagation(); $table.trigger("sortStart", this); // update header count index updateHeaderSortCount(table, list); // set css for headers setHeadersCss(table); // fixes #346 if (c.delayInit && isEmptyObject(c.cache)) { buildCache(table); } $table.trigger("sortBegin", this); // sort the table and append it to the dom multisort(table); appendToTable(table, init); $table.trigger("sortEnd", this); ts.applyWidget(table); if ($.isFunction(callback)) { callback(table); } }) .bind("appendCache" + c.namespace, function(e, callback, init) { e.stopPropagation(); appendToTable(table, init); if ($.isFunction(callback)) { callback(table); } }) .bind("updateCache" + c.namespace, function(e, callback){ // rebuild parsers if (!(c.parsers && c.parsers.length)) { buildParserCache(table); } // rebuild the cache map buildCache(table); if ($.isFunction(callback)) { callback(table); } }) .bind("applyWidgetId" + c.namespace, function(e, id) { e.stopPropagation(); ts.getWidgetById(id).format(table, c, c.widgetOptions); }) .bind("applyWidgets" + c.namespace, function(e, init) { e.stopPropagation(); // apply widgets ts.applyWidget(table, init); }) .bind("refreshWidgets" + c.namespace, function(e, all, dontapply){ e.stopPropagation(); ts.refreshWidgets(table, all, dontapply); }) .bind("destroy" + c.namespace, function(e, c, cb){ e.stopPropagation(); ts.destroy(table, c, cb); }) .bind("resetToLoadState" + c.namespace, 
function(){ // remove all widgets ts.refreshWidgets(table, true, true); // restore original settings; this clears out current settings, but does not clear // values saved to storage. c = $.extend(true, ts.defaults, c.originalSettings); table.hasInitialized = false; // setup the entire table again ts.setup( table, c ); }); } /* public methods */ ts.construct = function(settings) { return this.each(function() { var table = this, // merge & extend config options c = $.extend(true, {}, ts.defaults, settings); // save initial settings c.originalSettings = settings; // create a table from data (build table widget) if (!table.hasInitialized && ts.buildTable && this.tagName !== 'TABLE') { // return the table (in case the original target is the table's container) ts.buildTable(table, c); } else { ts.setup(table, c); } }); }; ts.setup = function(table, c) { // if no thead or tbody, or tablesorter is already present, quit if (!table || !table.tHead || table.tBodies.length === 0 || table.hasInitialized === true) { return c.debug ? log('ERROR: stopping initialization! 
No table, thead, tbody or tablesorter has already been initialized') : ''; } var k = '', $table = $(table), m = $.metadata; // initialization flag table.hasInitialized = false; // table is being processed flag table.isProcessing = true; // make sure to store the config object table.config = c; // save the settings where they read $.data(table, "tablesorter", c); if (c.debug) { $.data( table, 'startoveralltimer', new Date()); } // removing this in version 3 (only supports jQuery 1.7+) c.supportsDataObject = (function(version) { version[0] = parseInt(version[0], 10); return (version[0] > 1) || (version[0] === 1 && parseInt(version[1], 10) >= 4); })($.fn.jquery.split(".")); // digit sort text location; keeping max+/- for backwards compatibility c.string = { 'max': 1, 'min': -1, 'emptymin': 1, 'emptymax': -1, 'zero': 0, 'none': 0, 'null': 0, 'top': true, 'bottom': false }; // ensure case insensitivity c.emptyTo = c.emptyTo.toLowerCase(); c.stringTo = c.stringTo.toLowerCase(); // add table theme class only if there isn't already one there if (!/tablesorter\-/.test($table.attr('class'))) { k = (c.theme !== '' ? ' tablesorter-' + c.theme : ''); } c.table = table; c.$table = $table .addClass(ts.css.table + ' ' + c.tableClass + k) .attr('role', 'grid'); c.$headers = $table.find(c.selectorHeaders); // give the table a unique id, which will be used in namespace binding if (!c.namespace) { c.namespace = '.tablesorter' + Math.random().toString(16).slice(2); } else { // make sure namespace starts with a period & doesn't have weird characters c.namespace = '.' + c.namespace.replace(/\W/g,''); } c.$table.children().children('tr').attr('role', 'row'); c.$tbodies = $table.children('tbody:not(.' 
+ c.cssInfoBlock + ')').attr({ 'aria-live' : 'polite', 'aria-relevant' : 'all' }); if (c.$table.children('caption').length) { k = c.$table.children('caption')[0]; if (!k.id) { k.id = c.namespace.slice(1) + 'caption'; } c.$table.attr('aria-labelledby', k.id); } c.widgetInit = {}; // keep a list of initialized widgets // change textExtraction via data-attribute c.textExtraction = c.$table.attr('data-text-extraction') || c.textExtraction || 'basic'; // build headers buildHeaders(table); // fixate columns if the users supplies the fixedWidth option // do this after theme has been applied fixColumnWidth(table); // try to auto detect column type, and store in tables config buildParserCache(table); // start total row count at zero c.totalRows = 0; // build the cache for the tbody cells // delayInit will delay building the cache until the user starts a sort if (!c.delayInit) { buildCache(table); } // bind all header events and methods ts.bindEvents(table, c.$headers, true); bindMethods(table); // get sort list from jQuery data or metadata // in jQuery < 1.4, an error occurs when calling $table.data() if (c.supportsDataObject && typeof $table.data().sortlist !== 'undefined') { c.sortList = $table.data().sortlist; } else if (m && ($table.metadata() && $table.metadata().sortlist)) { c.sortList = $table.metadata().sortlist; } // apply widget init code ts.applyWidget(table, true); // if user has supplied a sort list to constructor if (c.sortList.length > 0) { $table.trigger("sorton", [c.sortList, {}, !c.initWidgets, true]); } else { setHeadersCss(table); if (c.initWidgets) { // apply widget format ts.applyWidget(table, false); } } // show processesing icon if (c.showProcessing) { $table .unbind('sortBegin' + c.namespace + ' sortEnd' + c.namespace) .bind('sortBegin' + c.namespace + ' sortEnd' + c.namespace, function(e) { clearTimeout(c.processTimer); ts.isProcessing(table); if (e.type === 'sortBegin') { c.processTimer = setTimeout(function(){ ts.isProcessing(table, true); }, 
500); } }); } // initialized table.hasInitialized = true; table.isProcessing = false; if (c.debug) { ts.benchmark("Overall initialization time", $.data( table, 'startoveralltimer')); } $table.trigger('tablesorter-initialized', table); if (typeof c.initialized === 'function') { c.initialized(table); } }; ts.getColumnData = function(table, obj, indx, getCell){ if (typeof obj === 'undefined' || obj === null) { return; } table = $(table)[0]; var result, $h, k, c = table.config; if (obj[indx]) { return getCell ? obj[indx] : obj[c.$headers.index( c.$headers.filter('[data-column="' + indx + '"]:last') )]; } for (k in obj) { if (typeof k === 'string') { $h = c.$headers.filter('[data-column="' + indx + '"]:last') // header cell with class/id .filter(k) // find elements within the header cell with cell/id .add( c.$headers.filter('[data-column="' + indx + '"]:last').find(k) ); if ($h.length) { return obj[k]; } } } return result; }; // computeTableHeaderCellIndexes from: // http://www.javascripttoolbox.com/lib/table/examples.php // http://www.javascripttoolbox.com/temp/table_cellindex.html ts.computeColumnIndex = function(trs) { var matrix = [], lookup = {}, cols = 0, // determine the number of columns i, j, k, l, $cell, cell, cells, rowIndex, cellId, rowSpan, colSpan, firstAvailCol, matrixrow; for (i = 0; i < trs.length; i++) { cells = trs[i].cells; for (j = 0; j < cells.length; j++) { cell = cells[j]; $cell = $(cell); rowIndex = cell.parentNode.rowIndex; cellId = rowIndex + "-" + $cell.index(); rowSpan = cell.rowSpan || 1; colSpan = cell.colSpan || 1; if (typeof(matrix[rowIndex]) === "undefined") { matrix[rowIndex] = []; } // Find first available column in the first row for (k = 0; k < matrix[rowIndex].length + 1; k++) { if (typeof(matrix[rowIndex][k]) === "undefined") { firstAvailCol = k; break; } } lookup[cellId] = firstAvailCol; cols = Math.max(firstAvailCol, cols); // add data-column $cell.attr({ 'data-column' : firstAvailCol }); // 'data-row' : rowIndex for (k = 
rowIndex; k < rowIndex + rowSpan; k++) { if (typeof(matrix[k]) === "undefined") { matrix[k] = []; } matrixrow = matrix[k]; for (l = firstAvailCol; l < firstAvailCol + colSpan; l++) { matrixrow[l] = "x"; } } } } // may not be accurate if # header columns !== # tbody columns return cols + 1; // add one because it's a zero-based index }; // *** Process table *** // add processing indicator ts.isProcessing = function(table, toggle, $ths) { table = $(table); var c = table[0].config, // default to all headers $h = $ths || table.find('.' + ts.css.header); if (toggle) { // don't use sortList if custom $ths used if (typeof $ths !== 'undefined' && c.sortList.length > 0) { // get headers from the sortList $h = $h.filter(function(){ // get data-column from attr to keep compatibility with jQuery 1.2.6 return this.sortDisabled ? false : ts.isValueInArray( parseFloat($(this).attr('data-column')), c.sortList) >= 0; }); } table.add($h).addClass(ts.css.processing + ' ' + c.cssProcessing); } else { table.add($h).removeClass(ts.css.processing + ' ' + c.cssProcessing); } }; // detach tbody but save the position // don't use tbody because there are portions that look for a tbody index (updateCell) ts.processTbody = function(table, $tb, getIt){ table = $(table)[0]; var holdr; if (getIt) { table.isProcessing = true; $tb.before(''); holdr = ($.fn.detach) ? $tb.detach() : $tb.remove(); return holdr; } holdr = $(table).find('span.tablesorter-savemyplace'); $tb.insertAfter( holdr ); holdr.remove(); table.isProcessing = false; }; ts.clearTableBody = function(table) { $(table)[0].config.$tbodies.children().detach(); }; ts.bindEvents = function(table, $headers, core){ table = $(table)[0]; var downTime, c = table.config; if (core !== true) { c.$extraHeaders = c.$extraHeaders ? 
c.$extraHeaders.add($headers) : $headers; } // apply event handling to headers and/or additional headers (stickyheaders, scroller, etc) $headers // http://stackoverflow.com/questions/5312849/jquery-find-self; .find(c.selectorSort).add( $headers.filter(c.selectorSort) ) .unbind('mousedown mouseup sort keyup '.split(' ').join(c.namespace + ' ')) .bind('mousedown mouseup sort keyup '.split(' ').join(c.namespace + ' '), function(e, external) { var cell, type = e.type; // only recognize left clicks or enter if ( ((e.which || e.button) !== 1 && !/sort|keyup/.test(type)) || (type === 'keyup' && e.which !== 13) ) { return; } // ignore long clicks (prevents resizable widget from initializing a sort) if (type === 'mouseup' && external !== true && (new Date().getTime() - downTime > 250)) { return; } // set timer on mousedown if (type === 'mousedown') { downTime = new Date().getTime(); return /(input|select|button|textarea)/i.test(e.target.tagName) || // allow clicks to contents of selected cells $(e.target).closest('td,th').hasClass(c.cssAllowClicks) ? '' : !c.cancelSelection; } if (c.delayInit && isEmptyObject(c.cache)) { buildCache(table); } // jQuery v1.2.6 doesn't have closest() cell = $.fn.closest ? $(this).closest('th, td')[0] : /TH|TD/.test(this.tagName) ? this : $(this).parents('th, td')[0]; // reference original table headers and find the same cell cell = c.$headers[ $headers.index( cell ) ]; if (!cell.sortDisabled) { initSort(table, cell, e); } }); if (c.cancelSelection) { // cancel selection $headers .attr('unselectable', 'on') .bind('selectstart', false) .css({ 'user-select': 'none', 'MozUserSelect': 'none' // not needed for jQuery 1.8+ }); } }; // restore headers ts.restoreHeaders = function(table){ var c = $(table)[0].config; // don't use c.$headers here in case header cells were swapped c.$table.find(c.selectorHeaders).each(function(i){ // only restore header cells if it is wrapped // because this is also used by the updateAll method if ($(this).find('.' 
+ ts.css.headerIn).length){ $(this).html( c.headerContent[i] ); } }); }; ts.destroy = function(table, removeClasses, callback){ table = $(table)[0]; if (!table.hasInitialized) { return; } // remove all widgets ts.refreshWidgets(table, true, true); var $t = $(table), c = table.config, $h = $t.find('thead:first'), $r = $h.find('tr.' + ts.css.headerRow).removeClass(ts.css.headerRow + ' ' + c.cssHeaderRow), $f = $t.find('tfoot:first > tr').children('th, td'); if (removeClasses === false && $.inArray('uitheme', c.widgets) >= 0) { // reapply uitheme classes, in case we want to maintain appearance $t.trigger('applyWidgetId', ['uitheme']); $t.trigger('applyWidgetId', ['zebra']); } // remove widget added rows, just in case $h.find('tr').not($r).remove(); // disable tablesorter $t .removeData('tablesorter') .unbind('sortReset update updateAll updateRows updateCell addRows updateComplete sorton appendCache updateCache applyWidgetId applyWidgets refreshWidgets destroy mouseup mouseleave keypress sortBegin sortEnd resetToLoadState '.split(' ').join(c.namespace + ' ')); c.$headers.add($f) .removeClass( [ts.css.header, c.cssHeader, c.cssAsc, c.cssDesc, ts.css.sortAsc, ts.css.sortDesc, ts.css.sortNone].join(' ') ) .removeAttr('data-column') .removeAttr('aria-label') .attr('aria-disabled', 'true'); $r.find(c.selectorSort).unbind('mousedown mouseup keypress '.split(' ').join(c.namespace + ' ')); ts.restoreHeaders(table); $t.toggleClass(ts.css.table + ' ' + c.tableClass + ' tablesorter-' + c.theme, removeClasses === false); // clear flag in case the plugin is initialized again table.hasInitialized = false; delete table.config.cache; if (typeof callback === 'function') { callback(table); } }; // *** sort functions *** // regex used in natural sort ts.regex = { chunk : /(^([+\-]?(?:0|[1-9]\d*)(?:\.\d*)?(?:[eE][+\-]?\d+)?)?$|^0x[0-9a-f]+$|\d+)/gi, // chunk/tokenize numbers & letters chunks: /(^\\0|\\0$)/, // replace chunks @ ends hex: /^0x[0-9a-f]+$/i // hex }; // Natural sort - 
https://github.com/overset/javascript-natural-sort (date sorting removed) // this function will only accept strings, or you'll see "TypeError: undefined is not a function" // I could add a = a.toString(); b = b.toString(); but it'll slow down the sort overall ts.sortNatural = function(a, b) { if (a === b) { return 0; } var xN, xD, yN, yD, xF, yF, i, mx, r = ts.regex; // first try and sort Hex codes if (r.hex.test(b)) { xD = parseInt(a.match(r.hex), 16); yD = parseInt(b.match(r.hex), 16); if ( xD < yD ) { return -1; } if ( xD > yD ) { return 1; } } // chunk/tokenize xN = a.replace(r.chunk, '\\0$1\\0').replace(r.chunks, '').split('\\0'); yN = b.replace(r.chunk, '\\0$1\\0').replace(r.chunks, '').split('\\0'); mx = Math.max(xN.length, yN.length); // natural sorting through split numeric strings and default strings for (i = 0; i < mx; i++) { // find floats not starting with '0', string or 0 if not defined xF = isNaN(xN[i]) ? xN[i] || 0 : parseFloat(xN[i]) || 0; yF = isNaN(yN[i]) ? yN[i] || 0 : parseFloat(yN[i]) || 0; // handle numeric vs string comparison - number < string - (Kyle Adams) if (isNaN(xF) !== isNaN(yF)) { return (isNaN(xF)) ? 1 : -1; } // rely on string comparison if different types - i.e. '02' < 2 != '02' < '2' if (typeof xF !== typeof yF) { xF += ''; yF += ''; } if (xF < yF) { return -1; } if (xF > yF) { return 1; } } return 0; }; ts.sortNaturalAsc = function(a, b, col, table, c) { if (a === b) { return 0; } var e = c.string[ (c.empties[col] || c.emptyTo ) ]; if (a === '' && e !== 0) { return typeof e === 'boolean' ? (e ? -1 : 1) : -e || -1; } if (b === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 1 : -1) : e || 1; } return ts.sortNatural(a, b); }; ts.sortNaturalDesc = function(a, b, col, table, c) { if (a === b) { return 0; } var e = c.string[ (c.empties[col] || c.emptyTo ) ]; if (a === '' && e !== 0) { return typeof e === 'boolean' ? (e ? -1 : 1) : e || 1; } if (b === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 
1 : -1) : -e || -1; } return ts.sortNatural(b, a); }; // basic alphabetical sort ts.sortText = function(a, b) { return a > b ? 1 : (a < b ? -1 : 0); }; // return text string value by adding up ascii value // so the text is somewhat sorted when using a digital sort // this is NOT an alphanumeric sort ts.getTextValue = function(a, num, mx) { if (mx) { // make sure the text value is greater than the max numerical value (mx) var i, l = a ? a.length : 0, n = mx + num; for (i = 0; i < l; i++) { n += a.charCodeAt(i); } return num * n; } return 0; }; ts.sortNumericAsc = function(a, b, num, mx, col, table) { if (a === b) { return 0; } var c = table.config, e = c.string[ (c.empties[col] || c.emptyTo ) ]; if (a === '' && e !== 0) { return typeof e === 'boolean' ? (e ? -1 : 1) : -e || -1; } if (b === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 1 : -1) : e || 1; } if (isNaN(a)) { a = ts.getTextValue(a, num, mx); } if (isNaN(b)) { b = ts.getTextValue(b, num, mx); } return a - b; }; ts.sortNumericDesc = function(a, b, num, mx, col, table) { if (a === b) { return 0; } var c = table.config, e = c.string[ (c.empties[col] || c.emptyTo ) ]; if (a === '' && e !== 0) { return typeof e === 'boolean' ? (e ? -1 : 1) : e || 1; } if (b === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 
1 : -1) : -e || -1; } if (isNaN(a)) { a = ts.getTextValue(a, num, mx); } if (isNaN(b)) { b = ts.getTextValue(b, num, mx); } return b - a; }; ts.sortNumeric = function(a, b) { return a - b; }; // used when replacing accented characters during sorting ts.characterEquivalents = { "a" : "\u00e1\u00e0\u00e2\u00e3\u00e4\u0105\u00e5", // áàâãäąå "A" : "\u00c1\u00c0\u00c2\u00c3\u00c4\u0104\u00c5", // ÁÀÂÃÄĄÅ "c" : "\u00e7\u0107\u010d", // çćč "C" : "\u00c7\u0106\u010c", // ÇĆČ "e" : "\u00e9\u00e8\u00ea\u00eb\u011b\u0119", // éèêëěę "E" : "\u00c9\u00c8\u00ca\u00cb\u011a\u0118", // ÉÈÊËĚĘ "i" : "\u00ed\u00ec\u0130\u00ee\u00ef\u0131", // íìİîïı "I" : "\u00cd\u00cc\u0130\u00ce\u00cf", // ÍÌİÎÏ "o" : "\u00f3\u00f2\u00f4\u00f5\u00f6", // óòôõö "O" : "\u00d3\u00d2\u00d4\u00d5\u00d6", // ÓÒÔÕÖ "ss": "\u00df", // ß (s sharp) "SS": "\u1e9e", // ẞ (Capital sharp s) "u" : "\u00fa\u00f9\u00fb\u00fc\u016f", // úùûüů "U" : "\u00da\u00d9\u00db\u00dc\u016e" // ÚÙÛÜŮ }; ts.replaceAccents = function(s) { var a, acc = '[', eq = ts.characterEquivalents; if (!ts.characterRegex) { ts.characterRegexArray = {}; for (a in eq) { if (typeof a === 'string') { acc += eq[a]; ts.characterRegexArray[a] = new RegExp('[' + eq[a] + ']', 'g'); } } ts.characterRegex = new RegExp(acc + ']'); } if (ts.characterRegex.test(s)) { for (a in eq) { if (typeof a === 'string') { s = s.replace( ts.characterRegexArray[a], a ); } } } return s; }; // *** utilities *** ts.isValueInArray = function(column, arry) { var indx, len = arry.length; for (indx = 0; indx < len; indx++) { if (arry[indx][0] === column) { return indx; } } return -1; }; ts.addParser = function(parser) { var i, l = ts.parsers.length, a = true; for (i = 0; i < l; i++) { if (ts.parsers[i].id.toLowerCase() === parser.id.toLowerCase()) { a = false; } } if (a) { ts.parsers.push(parser); } }; ts.getParserById = function(name) { /*jshint eqeqeq:false */ if (name == 'false') { return false; } var i, l = ts.parsers.length; for (i = 0; i < l; i++) { if 
(ts.parsers[i].id.toLowerCase() === (name.toString()).toLowerCase()) { return ts.parsers[i]; } } return false; }; ts.addWidget = function(widget) { ts.widgets.push(widget); }; ts.hasWidget = function(table, name){ table = $(table); return table.length && table[0].config && table[0].config.widgetInit[name] || false; }; ts.getWidgetById = function(name) { var i, w, l = ts.widgets.length; for (i = 0; i < l; i++) { w = ts.widgets[i]; if (w && w.hasOwnProperty('id') && w.id.toLowerCase() === name.toLowerCase()) { return w; } } }; ts.applyWidget = function(table, init) { table = $(table)[0]; // in case this is called externally var c = table.config, wo = c.widgetOptions, tableClass = ' ' + c.table.className + ' ', widgets = [], time, time2, w, wd; // prevent numerous consecutive widget applications if (init !== false && table.hasInitialized && (table.isApplyingWidgets || table.isUpdating)) { return; } if (c.debug) { time = new Date(); } // look for widgets to apply from in table class // stop using \b otherwise this matches "ui-widget-content" & adds "content" widget wd = new RegExp( '\\s' + c.widgetClass.replace( /\{name\}/i, '([\\w-]+)' )+ '\\s', 'g' ); if ( tableClass.match( wd ) ) { // extract out the widget id from the table class (widget id's can include dashes) w = tableClass.match( wd ); if ( w ) { $.each( w, function( i,n ){ c.widgets.push( n.replace( wd, '$1' ) ); }); } } if (c.widgets.length) { table.isApplyingWidgets = true; // ensure unique widget ids c.widgets = $.grep(c.widgets, function(v, k){ return $.inArray(v, c.widgets) === k; }); // build widget array & add priority as needed $.each(c.widgets || [], function(i,n){ wd = ts.getWidgetById(n); if (wd && wd.id) { // set priority to 10 if not defined if (!wd.priority) { wd.priority = 10; } widgets[i] = wd; } }); // sort widgets by priority widgets.sort(function(a, b){ return a.priority < b.priority ? -1 : a.priority === b.priority ? 
0 : 1; }); // add/update selected widgets $.each(widgets, function(i,w){ if (w) { if (init || !(c.widgetInit[w.id])) { // set init flag first to prevent calling init more than once (e.g. pager) c.widgetInit[w.id] = true; if (w.hasOwnProperty('options')) { wo = table.config.widgetOptions = $.extend( true, {}, w.options, wo ); } if (w.hasOwnProperty('init')) { if (c.debug) { time2 = new Date(); } w.init(table, w, c, wo); if (c.debug) { ts.benchmark('Initializing ' + w.id + ' widget', time2); } } } if (!init && w.hasOwnProperty('format')) { if (c.debug) { time2 = new Date(); } w.format(table, c, wo, false); if (c.debug) { ts.benchmark( ( init ? 'Initializing ' : 'Applying ' ) + w.id + ' widget', time2); } } } }); } setTimeout(function(){ table.isApplyingWidgets = false; $.data(table, 'lastWidgetApplication', new Date()); }, 0); if (c.debug) { w = c.widgets.length; benchmark("Completed " + (init === true ? "initializing " : "applying ") + w + " widget" + (w !== 1 ? "s" : ""), time); } }; ts.refreshWidgets = function(table, doAll, dontapply) { table = $(table)[0]; // see issue #243 var i, c = table.config, cw = c.widgets, w = ts.widgets, l = w.length; // remove previous widgets for (i = 0; i < l; i++){ if ( w[i] && w[i].id && (doAll || $.inArray( w[i].id, cw ) < 0) ) { if (c.debug) { log( 'Refeshing widgets: Removing "' + w[i].id + '"' ); } // only remove widgets that have been initialized - fixes #442 if (w[i].hasOwnProperty('remove') && c.widgetInit[w[i].id]) { w[i].remove(table, c, c.widgetOptions); c.widgetInit[w[i].id] = false; } } } if (dontapply !== true) { ts.applyWidget(table, doAll); } }; // get sorter, string, empty, etc options for each column from // jQuery data, metadata, header option or header class name ("sorter-false") // priority = jQuery data > meta > headers option > header class name ts.getData = function(h, ch, key) { var val = '', $h = $(h), m, cl; if (!$h.length) { return ''; } m = $.metadata ? 
$h.metadata() : false; cl = ' ' + ($h.attr('class') || ''); if (typeof $h.data(key) !== 'undefined' || typeof $h.data(key.toLowerCase()) !== 'undefined'){ // "data-lockedOrder" is assigned to "lockedorder"; but "data-locked-order" is assigned to "lockedOrder" // "data-sort-initial-order" is assigned to "sortInitialOrder" val += $h.data(key) || $h.data(key.toLowerCase()); } else if (m && typeof m[key] !== 'undefined') { val += m[key]; } else if (ch && typeof ch[key] !== 'undefined') { val += ch[key]; } else if (cl !== ' ' && cl.match(' ' + key + '-')) { // include sorter class name "sorter-text", etc; now works with "sorter-my-custom-parser" val = cl.match( new RegExp('\\s' + key + '-([\\w-]+)') )[1] || ''; } return $.trim(val); }; ts.formatFloat = function(s, table) { if (typeof s !== 'string' || s === '') { return s; } // allow using formatFloat without a table; defaults to US number format var i, t = table && table.config ? table.config.usNumberFormat !== false : typeof table !== "undefined" ? table : true; if (t) { // US Format - 1,234,567.89 -> 1234567.89 s = s.replace(/,/g,''); } else { // German Format = 1.234.567,89 -> 1234567.89 // French Format = 1 234 567,89 -> 1234567.89 s = s.replace(/[\s|\.]/g,'').replace(/,/g,'.'); } if(/^\s*\([.\d]+\)/.test(s)) { // make (#) into a negative number -> (10) = -10 s = s.replace(/^\s*\(([.\d]+)\)/, '-$1'); } i = parseFloat(s); // return the text instead of zero return isNaN(i) ? $.trim(s) : i; }; ts.isDigit = function(s) { // replace all unwanted chars and match return isNaN(s) ? 
(/^[\-+(]?\d+[)]?$/).test(s.toString().replace(/[,.'"\s]/g, '')) : true; }; }() }); // make shortcut var ts = $.tablesorter; // extend plugin scope $.fn.extend({ tablesorter: ts.construct }); // add default parsers ts.addParser({ id: 'no-parser', is: function() { return false; }, format: function() { return ''; }, type: 'text' }); ts.addParser({ id: "text", is: function() { return true; }, format: function(s, table) { var c = table.config; if (s) { s = $.trim( c.ignoreCase ? s.toLocaleLowerCase() : s ); s = c.sortLocaleCompare ? ts.replaceAccents(s) : s; } return s; }, type: "text" }); ts.addParser({ id: "digit", is: function(s) { return ts.isDigit(s); }, format: function(s, table) { var n = ts.formatFloat((s || '').replace(/[^\w,. \-()]/g, ""), table); return s && typeof n === 'number' ? n : s ? $.trim( s && table.config.ignoreCase ? s.toLocaleLowerCase() : s ) : s; }, type: "numeric" }); ts.addParser({ id: "currency", is: function(s) { return (/^\(?\d+[\u00a3$\u20ac\u00a4\u00a5\u00a2?.]|[\u00a3$\u20ac\u00a4\u00a5\u00a2?.]\d+\)?$/).test((s || '').replace(/[+\-,. ]/g,'')); // £$€¤¥¢ }, format: function(s, table) { var n = ts.formatFloat((s || '').replace(/[^\w,. \-()]/g, ""), table); return s && typeof n === 'number' ? n : s ? $.trim( s && table.config.ignoreCase ? s.toLocaleLowerCase() : s ) : s; }, type: "numeric" }); ts.addParser({ id: "url", is: function(s) { return (/^(https?|ftp|file):\/\//).test(s); }, format: function(s) { return s ? $.trim(s.replace(/(https?|ftp|file):\/\//, '')) : s; }, parsed : true, // filter widget flag type: "text" }); ts.addParser({ id: "isoDate", is: function(s) { return (/^\d{4}[\/\-]\d{1,2}[\/\-]\d{1,2}/).test(s); }, format: function(s, table) { var date = s ? new Date( s.replace(/-/g, "/") ) : s; return date instanceof Date && isFinite(date) ? date.getTime() : s; }, type: "numeric" }); ts.addParser({ id: "percent", is: function(s) { return (/(\d\s*?%|%\s*?\d)/).test(s) && s.length < 15; }, format: function(s, table) { return s ? 
ts.formatFloat(s.replace(/%/g, ""), table) : s; }, type: "numeric" }); // added image parser to core v2.17.9 ts.addParser({ id: "image", is: function(s, table, node, $node){ return $node.find('img').length > 0; }, format: function(s, table, cell) { return $(cell).find('img').attr(table.config.imgAttr || 'alt') || s; }, parsed : true, // filter widget flag type: "text" }); ts.addParser({ id: "usLongDate", is: function(s) { // two digit years are not allowed cross-browser // Jan 01, 2013 12:34:56 PM or 01 Jan 2013 return (/^[A-Z]{3,10}\.?\s+\d{1,2},?\s+(\d{4})(\s+\d{1,2}:\d{2}(:\d{2})?(\s+[AP]M)?)?$/i).test(s) || (/^\d{1,2}\s+[A-Z]{3,10}\s+\d{4}/i).test(s); }, format: function(s, table) { var date = s ? new Date( s.replace(/(\S)([AP]M)$/i, "$1 $2") ) : s; return date instanceof Date && isFinite(date) ? date.getTime() : s; }, type: "numeric" }); ts.addParser({ id: "shortDate", // "mmddyyyy", "ddmmyyyy" or "yyyymmdd" is: function(s) { // testing for ##-##-#### or ####-##-##, so it's not perfect; time can be included return (/(^\d{1,2}[\/\s]\d{1,2}[\/\s]\d{4})|(^\d{4}[\/\s]\d{1,2}[\/\s]\d{1,2})/).test((s || '').replace(/\s+/g," ").replace(/[\-.,]/g, "/")); }, format: function(s, table, cell, cellIndex) { if (s) { var date, d, c = table.config, ci = c.$headers.filter('[data-column=' + cellIndex + ']:last'), format = ci.length && ci[0].dateFormat || ts.getData( ci, ts.getColumnData( table, c.headers, cellIndex ), 'dateFormat') || c.dateFormat; d = s.replace(/\s+/g," ").replace(/[\-.,]/g, "/"); // escaped - because JSHint in Firefox was showing it as an error if (format === "mmddyyyy") { d = d.replace(/(\d{1,2})[\/\s](\d{1,2})[\/\s](\d{4})/, "$3/$1/$2"); } else if (format === "ddmmyyyy") { d = d.replace(/(\d{1,2})[\/\s](\d{1,2})[\/\s](\d{4})/, "$3/$2/$1"); } else if (format === "yyyymmdd") { d = d.replace(/(\d{4})[\/\s](\d{1,2})[\/\s](\d{1,2})/, "$1/$2/$3"); } date = new Date(d); return date instanceof Date && isFinite(date) ? 
date.getTime() : s; } return s; }, type: "numeric" }); ts.addParser({ id: "time", is: function(s) { return (/^(([0-2]?\d:[0-5]\d)|([0-1]?\d:[0-5]\d\s?([AP]M)))$/i).test(s); }, format: function(s, table) { var date = s ? new Date( "2000/01/01 " + s.replace(/(\S)([AP]M)$/i, "$1 $2") ) : s; return date instanceof Date && isFinite(date) ? date.getTime() : s; }, type: "numeric" }); ts.addParser({ id: "metadata", is: function() { return false; }, format: function(s, table, cell) { var c = table.config, p = (!c.parserMetadataName) ? 'sortValue' : c.parserMetadataName; return $(cell).metadata()[p]; }, type: "numeric" }); // add default widgets ts.addWidget({ id: "zebra", priority: 90, format: function(table, c, wo) { var $tb, $tv, $tr, row, even, time, k, child = new RegExp(c.cssChildRow, 'i'), b = c.$tbodies; if (c.debug) { time = new Date(); } for (k = 0; k < b.length; k++ ) { // loop through the visible rows row = 0; $tb = b.eq(k); $tv = $tb.children('tr:visible').not(c.selectorRemove); // revered back to using jQuery each - strangely it's the fastest method /*jshint loopfunc:true */ $tv.each(function(){ $tr = $(this); // style child rows the same way the parent row was styled if (!child.test(this.className)) { row++; } even = (row % 2 === 0); $tr.removeClass(wo.zebra[even ? 1 : 0]).addClass(wo.zebra[even ? 0 : 1]); }); } }, remove: function(table, c, wo){ var k, $tb, b = c.$tbodies, rmv = (wo.zebra || [ "even", "odd" ]).join(' '); for (k = 0; k < b.length; k++ ){ $tb = ts.processTbody(table, b.eq(k), true); // remove tbody $tb.children().removeClass(rmv); ts.processTbody(table, $tb, false); // restore tbody } } }); })(jQuery); ================================================ FILE: vendor/assets/javascripts/jquery.timeago.js ================================================ /** * Timeago is a jQuery plugin that makes it easy to support automatically * updating fuzzy timestamps (e.g. "4 minutes ago" or "about 1 day ago"). 
* * @name timeago * @version 1.4.3 * @requires jQuery v1.2.3+ * @author Ryan McGeary * @license MIT License - http://www.opensource.org/licenses/mit-license.php * * For usage and examples, visit: * http://timeago.yarp.com/ * * Copyright (c) 2008-2015, Ryan McGeary (ryan -[at]- mcgeary [*dot*] org) */ (function (factory) { if (typeof define === 'function' && define.amd) { // AMD. Register as an anonymous module. define(['jquery'], factory); } else if (typeof module === 'object' && typeof module.exports === 'object') { factory(require('jquery')); } else { // Browser globals factory(jQuery); } }(function ($) { $.timeago = function(timestamp) { if (timestamp instanceof Date) { return inWords(timestamp); } else if (typeof timestamp === "string") { return inWords($.timeago.parse(timestamp)); } else if (typeof timestamp === "number") { return inWords(new Date(timestamp)); } else { return inWords($.timeago.datetime(timestamp)); } }; var $t = $.timeago; $.extend($.timeago, { settings: { refreshMillis: 60000, allowPast: true, allowFuture: false, localeTitle: false, cutoff: 0, strings: { prefixAgo: null, prefixFromNow: null, suffixAgo: "ago", suffixFromNow: "from now", inPast: 'any moment now', seconds: "less than a minute", minute: "about a minute", minutes: "%d minutes", hour: "about an hour", hours: "about %d hours", day: "a day", days: "%d days", month: "about a month", months: "%d months", year: "about a year", years: "%d years", wordSeparator: " ", numbers: [] } }, inWords: function(distanceMillis) { if(!this.settings.allowPast && ! 
this.settings.allowFuture) { throw 'timeago allowPast and allowFuture settings can not both be set to false.'; } var $l = this.settings.strings; var prefix = $l.prefixAgo; var suffix = $l.suffixAgo; if (this.settings.allowFuture) { if (distanceMillis < 0) { prefix = $l.prefixFromNow; suffix = $l.suffixFromNow; } } if(!this.settings.allowPast && distanceMillis >= 0) { return this.settings.strings.inPast; } var seconds = Math.abs(distanceMillis) / 1000; var minutes = seconds / 60; var hours = minutes / 60; var days = hours / 24; var years = days / 365; function substitute(stringOrFunction, number) { var string = $.isFunction(stringOrFunction) ? stringOrFunction(number, distanceMillis) : stringOrFunction; var value = ($l.numbers && $l.numbers[number]) || number; return string.replace(/%d/i, value); } var words = seconds < 45 && substitute($l.seconds, Math.round(seconds)) || seconds < 90 && substitute($l.minute, 1) || minutes < 45 && substitute($l.minutes, Math.round(minutes)) || minutes < 90 && substitute($l.hour, 1) || hours < 24 && substitute($l.hours, Math.round(hours)) || hours < 42 && substitute($l.day, 1) || days < 30 && substitute($l.days, Math.round(days)) || days < 45 && substitute($l.month, 1) || days < 365 && substitute($l.months, Math.round(days / 30)) || years < 1.5 && substitute($l.year, 1) || substitute($l.years, Math.round(years)); var separator = $l.wordSeparator || ""; if ($l.wordSeparator === undefined) { separator = " "; } return $.trim([prefix, words, suffix].join(separator)); }, parse: function(iso8601) { var s = $.trim(iso8601); s = s.replace(/\.\d+/,""); // remove milliseconds s = s.replace(/-/,"/").replace(/-/,"/"); s = s.replace(/T/," ").replace(/Z/," UTC"); s = s.replace(/([\+\-]\d\d)\:?(\d\d)/," $1$2"); // -04:00 -> -0400 s = s.replace(/([\+\-]\d\d)$/," $100"); // +09 -> +0900 return new Date(s); }, datetime: function(elem) { var iso8601 = $t.isTime(elem) ? 
$(elem).attr("datetime") : $(elem).attr("title"); return $t.parse(iso8601); }, isTime: function(elem) { // jQuery's `is()` doesn't play well with HTML5 in IE return $(elem).get(0).tagName.toLowerCase() === "time"; // $(elem).is("time"); } }); // functions that can be called via $(el).timeago('action') // init is default when no action is given // functions are called with context of a single element var functions = { init: function(){ var refresh_el = $.proxy(refresh, this); refresh_el(); var $s = $t.settings; if ($s.refreshMillis > 0) { this._timeagoInterval = setInterval(refresh_el, $s.refreshMillis); } }, update: function(time){ var parsedTime = $t.parse(time); $(this).data('timeago', { datetime: parsedTime }); if($t.settings.localeTitle) $(this).attr("title", parsedTime.toLocaleString()); refresh.apply(this); }, updateFromDOM: function(){ $(this).data('timeago', { datetime: $t.parse( $t.isTime(this) ? $(this).attr("datetime") : $(this).attr("title") ) }); refresh.apply(this); }, dispose: function () { if (this._timeagoInterval) { window.clearInterval(this._timeagoInterval); this._timeagoInterval = null; } } }; $.fn.timeago = function(action, options) { var fn = action ? 
functions[action] : functions.init; if(!fn){ throw new Error("Unknown function name '"+ action +"' for timeago"); } // each over objects here and call the requested function this.each(function(){ fn.call(this, options); }); return this; }; function refresh() { //check if it's still visible if(!$.contains(document.documentElement,this)){ //stop if it has been removed $(this).timeago("dispose"); return this; } var data = prepareData(this); var $s = $t.settings; if (!isNaN(data.datetime)) { if ( $s.cutoff == 0 || Math.abs(distance(data.datetime)) < $s.cutoff) { $(this).text(inWords(data.datetime)); } } return this; } function prepareData(element) { element = $(element); if (!element.data("timeago")) { element.data("timeago", { datetime: $t.datetime(element) }); var text = $.trim(element.text()); if ($t.settings.localeTitle) { element.attr("title", element.data('timeago').datetime.toLocaleString()); } else if (text.length > 0 && !($t.isTime(element) && element.attr("title"))) { element.attr("title", text); } } return element.data("timeago"); } function inWords(date) { return $t.inWords(distance(date)); } function distance(date) { return (new Date().getTime() - date.getTime()); } // fix for IE6 suckage document.createElement("abbr"); document.createElement("time"); })); ================================================ FILE: vendor/assets/javascripts/jquery.tipTip.js ================================================ /* * TipTip * Copyright 2010 Drew Wilson * www.drewwilson.com * code.drewwilson.com/entry/tiptip-jquery-plugin * * Version 1.3 - Updated: Mar. 23, 2010 * * This Plug-In will create a custom tooltip to replace the default * browser tooltip. It is extremely lightweight and very smart in * that it detects the edges of the browser window and will make sure * the tooltip stays within the current window size. 
As a result the * tooltip will adjust itself to be displayed above, below, to the left * or to the right depending on what is necessary to stay within the * browser window. It is completely customizable as well via CSS. * * This TipTip jQuery plug-in is dual licensed under the MIT and GPL licenses: * http://www.opensource.org/licenses/mit-license.php * http://www.gnu.org/licenses/gpl.html */ (function($){ $.fn.tipTip = function(options) { var defaults = { activation: "hover", keepAlive: false, maxWidth: "200px", edgeOffset: 3, defaultPosition: "bottom", delay: 400, fadeIn: 200, fadeOut: 200, attribute: "title", content: false, // HTML or String to fill TipTIp with enter: function(){}, exit: function(){} }; var opts = $.extend(defaults, options); // Setup tip tip elements and render them to the DOM if($("#tiptip_holder").length <= 0){ var tiptip_holder = $('
'); var tiptip_content = $('
'); var tiptip_arrow = $('
'); $("body").append(tiptip_holder.html(tiptip_content).prepend(tiptip_arrow.html('
'))); } else { var tiptip_holder = $("#tiptip_holder"); var tiptip_content = $("#tiptip_content"); var tiptip_arrow = $("#tiptip_arrow"); } return this.each(function(){ var org_elem = $(this); if(opts.content){ var org_title = opts.content; } else { var org_title = org_elem.attr(opts.attribute); } if(org_title != ""){ if(!opts.content){ org_elem.removeAttr(opts.attribute); //remove original Attribute } var timeout = false; if(opts.activation == "hover"){ org_elem.hover(function(){ active_tiptip(); }, function(){ if(!opts.keepAlive){ deactive_tiptip(); } }); if(opts.keepAlive){ tiptip_holder.hover(function(){}, function(){ deactive_tiptip(); }); } } else if(opts.activation == "focus"){ org_elem.focus(function(){ active_tiptip(); }).blur(function(){ deactive_tiptip(); }); } else if(opts.activation == "click"){ org_elem.click(function(){ active_tiptip(); return false; }).hover(function(){},function(){ if(!opts.keepAlive){ deactive_tiptip(); } }); if(opts.keepAlive){ tiptip_holder.hover(function(){}, function(){ deactive_tiptip(); }); } } function active_tiptip(){ opts.enter.call(this); tiptip_content.html(org_title); tiptip_holder.hide().removeAttr("class").css("margin","0"); tiptip_arrow.removeAttr("style"); var top = parseInt(org_elem.offset()['top']); var left = parseInt(org_elem.offset()['left']); var org_width = parseInt(org_elem.outerWidth()); var org_height = parseInt(org_elem.outerHeight()); var tip_w = tiptip_holder.outerWidth(); var tip_h = tiptip_holder.outerHeight(); var w_compare = Math.round((org_width - tip_w) / 2); var h_compare = Math.round((org_height - tip_h) / 2); var marg_left = Math.round(left + w_compare); var marg_top = Math.round(top + org_height + opts.edgeOffset); var t_class = ""; var arrow_top = ""; var arrow_left = Math.round(tip_w - 12) / 2; if(opts.defaultPosition == "bottom"){ t_class = "_bottom"; } else if(opts.defaultPosition == "top"){ t_class = "_top"; } else if(opts.defaultPosition == "left"){ t_class = "_left"; } else 
if(opts.defaultPosition == "right"){ t_class = "_right"; } var right_compare = (w_compare + left) < parseInt($(window).scrollLeft()); var left_compare = (tip_w + left) > parseInt($(window).width()); if((right_compare && w_compare < 0) || (t_class == "_right" && !left_compare) || (t_class == "_left" && left < (tip_w + opts.edgeOffset + 5))){ t_class = "_right"; arrow_top = Math.round(tip_h - 13) / 2; arrow_left = -12; marg_left = Math.round(left + org_width + opts.edgeOffset); marg_top = Math.round(top + h_compare); } else if((left_compare && w_compare < 0) || (t_class == "_left" && !right_compare)){ t_class = "_left"; arrow_top = Math.round(tip_h - 13) / 2; arrow_left = Math.round(tip_w); marg_left = Math.round(left - (tip_w + opts.edgeOffset + 5)); marg_top = Math.round(top + h_compare); } var top_compare = (top + org_height + opts.edgeOffset + tip_h + 8) > parseInt($(window).height() + $(window).scrollTop()); var bottom_compare = ((top + org_height) - (opts.edgeOffset + tip_h + 8)) < 0; if(top_compare || (t_class == "_bottom" && top_compare) || (t_class == "_top" && !bottom_compare)){ if(t_class == "_top" || t_class == "_bottom"){ t_class = "_top"; } else { t_class = t_class+"_top"; } arrow_top = tip_h; marg_top = Math.round(top - (tip_h + 5 + opts.edgeOffset)); } else if(bottom_compare | (t_class == "_top" && bottom_compare) || (t_class == "_bottom" && !top_compare)){ if(t_class == "_top" || t_class == "_bottom"){ t_class = "_bottom"; } else { t_class = t_class+"_bottom"; } arrow_top = -12; marg_top = Math.round(top + org_height + opts.edgeOffset); } if(t_class == "_right_top" || t_class == "_left_top"){ marg_top = marg_top + 5; } else if(t_class == "_right_bottom" || t_class == "_left_bottom"){ marg_top = marg_top - 5; } if(t_class == "_left_top" || t_class == "_left_bottom"){ marg_left = marg_left + 5; } tiptip_arrow.css({"margin-left": arrow_left+"px", "margin-top": arrow_top+"px"}); tiptip_holder.css({"margin-left": marg_left+"px", "margin-top": 
marg_top+"px"}).attr("class","tip"+t_class); if (timeout){ clearTimeout(timeout); } timeout = setTimeout(function(){ tiptip_holder.stop(true,true).fadeIn(opts.fadeIn); }, opts.delay); } function deactive_tiptip(){ opts.exit.call(this); if (timeout){ clearTimeout(timeout); } tiptip_holder.fadeOut(opts.fadeOut); } } }); } })(jQuery); ================================================ FILE: vendor/assets/javascripts/moment.js ================================================ // moment.js // version : 2.0.0 // author : Tim Wood // license : MIT // momentjs.com (function (undefined) { /************************************ Constants ************************************/ var moment, VERSION = "2.0.0", round = Math.round, i, // internal storage for language config files languages = {}, // check for nodeJS hasModule = (typeof module !== 'undefined' && module.exports), // ASP.NET json date format regex aspNetJsonRegex = /^\/?Date\((\-?\d+)/i, // format tokens formattingTokens = /(\[[^\[]*\])|(\\)?(Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|YYYYY|YYYY|YY|a|A|hh?|HH?|mm?|ss?|SS?S?|X|zz?|ZZ?|.)/g, localFormattingTokens = /(\[[^\[]*\])|(\\)?(LT|LL?L?L?|l{1,4})/g, // parsing tokens parseMultipleFormatChunker = /([0-9a-zA-Z\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+)/gi, // parsing token regexes parseTokenOneOrTwoDigits = /\d\d?/, // 0 - 99 parseTokenOneToThreeDigits = /\d{1,3}/, // 0 - 999 parseTokenThreeDigits = /\d{3}/, // 000 - 999 parseTokenFourDigits = /\d{1,4}/, // 0 - 9999 parseTokenSixDigits = /[+\-]?\d{1,6}/, // -999,999 - 999,999 parseTokenWord = /[0-9]*[a-z\u00A0-\u05FF\u0700-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+|[\u0600-\u06FF]+\s*?[\u0600-\u06FF]+/i, // any word (or two) characters or numbers including two word month in arabic. 
parseTokenTimezone = /Z|[\+\-]\d\d:?\d\d/i, // +00:00 -00:00 +0000 -0000 or Z parseTokenT = /T/i, // T (ISO seperator) parseTokenTimestampMs = /[\+\-]?\d+(\.\d{1,3})?/, // 123456789 123456789.123 // preliminary iso regex // 0000-00-00 + T + 00 or 00:00 or 00:00:00 or 00:00:00.000 + +00:00 or +0000 isoRegex = /^\s*\d{4}-\d\d-\d\d((T| )(\d\d(:\d\d(:\d\d(\.\d\d?\d?)?)?)?)?([\+\-]\d\d:?\d\d)?)?/, isoFormat = 'YYYY-MM-DDTHH:mm:ssZ', // iso time formats and regexes isoTimes = [ ['HH:mm:ss.S', /(T| )\d\d:\d\d:\d\d\.\d{1,3}/], ['HH:mm:ss', /(T| )\d\d:\d\d:\d\d/], ['HH:mm', /(T| )\d\d:\d\d/], ['HH', /(T| )\d\d/] ], // timezone chunker "+10:00" > ["10", "00"] or "-1530" > ["-15", "30"] parseTimezoneChunker = /([\+\-]|\d\d)/gi, // getter and setter names proxyGettersAndSetters = 'Month|Date|Hours|Minutes|Seconds|Milliseconds'.split('|'), unitMillisecondFactors = { 'Milliseconds' : 1, 'Seconds' : 1e3, 'Minutes' : 6e4, 'Hours' : 36e5, 'Days' : 864e5, 'Months' : 2592e6, 'Years' : 31536e6 }, // format function strings formatFunctions = {}, // tokens to ordinalize and pad ordinalizeTokens = 'DDD w W M D d'.split(' '), paddedTokens = 'M D H h m s w W'.split(' '), formatTokenFunctions = { M : function () { return this.month() + 1; }, MMM : function (format) { return this.lang().monthsShort(this, format); }, MMMM : function (format) { return this.lang().months(this, format); }, D : function () { return this.date(); }, DDD : function () { return this.dayOfYear(); }, d : function () { return this.day(); }, dd : function (format) { return this.lang().weekdaysMin(this, format); }, ddd : function (format) { return this.lang().weekdaysShort(this, format); }, dddd : function (format) { return this.lang().weekdays(this, format); }, w : function () { return this.week(); }, W : function () { return this.isoWeek(); }, YY : function () { return leftZeroFill(this.year() % 100, 2); }, YYYY : function () { return leftZeroFill(this.year(), 4); }, YYYYY : function () { return 
leftZeroFill(this.year(), 5); }, a : function () { return this.lang().meridiem(this.hours(), this.minutes(), true); }, A : function () { return this.lang().meridiem(this.hours(), this.minutes(), false); }, H : function () { return this.hours(); }, h : function () { return this.hours() % 12 || 12; }, m : function () { return this.minutes(); }, s : function () { return this.seconds(); }, S : function () { return ~~(this.milliseconds() / 100); }, SS : function () { return leftZeroFill(~~(this.milliseconds() / 10), 2); }, SSS : function () { return leftZeroFill(this.milliseconds(), 3); }, Z : function () { var a = -this.zone(), b = "+"; if (a < 0) { a = -a; b = "-"; } return b + leftZeroFill(~~(a / 60), 2) + ":" + leftZeroFill(~~a % 60, 2); }, ZZ : function () { var a = -this.zone(), b = "+"; if (a < 0) { a = -a; b = "-"; } return b + leftZeroFill(~~(10 * a / 6), 4); }, X : function () { return this.unix(); } }; function padToken(func, count) { return function (a) { return leftZeroFill(func.call(this, a), count); }; } function ordinalizeToken(func) { return function (a) { return this.lang().ordinal(func.call(this, a)); }; } while (ordinalizeTokens.length) { i = ordinalizeTokens.pop(); formatTokenFunctions[i + 'o'] = ordinalizeToken(formatTokenFunctions[i]); } while (paddedTokens.length) { i = paddedTokens.pop(); formatTokenFunctions[i + i] = padToken(formatTokenFunctions[i], 2); } formatTokenFunctions.DDDD = padToken(formatTokenFunctions.DDD, 3); /************************************ Constructors ************************************/ function Language() { } // Moment prototype object function Moment(config) { extend(this, config); } // Duration Constructor function Duration(duration) { var data = this._data = {}, years = duration.years || duration.year || duration.y || 0, months = duration.months || duration.month || duration.M || 0, weeks = duration.weeks || duration.week || duration.w || 0, days = duration.days || duration.day || duration.d || 0, hours = 
duration.hours || duration.hour || duration.h || 0, minutes = duration.minutes || duration.minute || duration.m || 0, seconds = duration.seconds || duration.second || duration.s || 0, milliseconds = duration.milliseconds || duration.millisecond || duration.ms || 0; // representation for dateAddRemove this._milliseconds = milliseconds + seconds * 1e3 + // 1000 minutes * 6e4 + // 1000 * 60 hours * 36e5; // 1000 * 60 * 60 // Because of dateAddRemove treats 24 hours as different from a // day when working around DST, we need to store them separately this._days = days + weeks * 7; // It is impossible translate months into days without knowing // which months you are are talking about, so we have to store // it separately. this._months = months + years * 12; // The following code bubbles up values, see the tests for // examples of what that means. data.milliseconds = milliseconds % 1000; seconds += absRound(milliseconds / 1000); data.seconds = seconds % 60; minutes += absRound(seconds / 60); data.minutes = minutes % 60; hours += absRound(minutes / 60); data.hours = hours % 24; days += absRound(hours / 24); days += weeks * 7; data.days = days % 30; months += absRound(days / 30); data.months = months % 12; years += absRound(months / 12); data.years = years; } /************************************ Helpers ************************************/ function extend(a, b) { for (var i in b) { if (b.hasOwnProperty(i)) { a[i] = b[i]; } } return a; } function absRound(number) { if (number < 0) { return Math.ceil(number); } else { return Math.floor(number); } } // left zero fill a number // see http://jsperf.com/left-zero-filling for performance comparison function leftZeroFill(number, targetLength) { var output = number + ''; while (output.length < targetLength) { output = '0' + output; } return output; } // helper function for _.addTime and _.subtractTime function addOrSubtractDurationFromMoment(mom, duration, isAdding) { var ms = duration._milliseconds, d = duration._days, M = 
duration._months, currentDate; if (ms) { mom._d.setTime(+mom + ms * isAdding); } if (d) { mom.date(mom.date() + d * isAdding); } if (M) { currentDate = mom.date(); mom.date(1) .month(mom.month() + M * isAdding) .date(Math.min(currentDate, mom.daysInMonth())); } } // check if is an array function isArray(input) { return Object.prototype.toString.call(input) === '[object Array]'; } // compare two arrays, return the number of differences function compareArrays(array1, array2) { var len = Math.min(array1.length, array2.length), lengthDiff = Math.abs(array1.length - array2.length), diffs = 0, i; for (i = 0; i < len; i++) { if (~~array1[i] !== ~~array2[i]) { diffs++; } } return diffs + lengthDiff; } /************************************ Languages ************************************/ Language.prototype = { set : function (config) { var prop, i; for (i in config) { prop = config[i]; if (typeof prop === 'function') { this[i] = prop; } else { this['_' + i] = prop; } } }, _months : "January_February_March_April_May_June_July_August_September_October_November_December".split("_"), months : function (m) { return this._months[m.month()]; }, _monthsShort : "Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec".split("_"), monthsShort : function (m) { return this._monthsShort[m.month()]; }, monthsParse : function (monthName) { var i, mom, regex, output; if (!this._monthsParse) { this._monthsParse = []; } for (i = 0; i < 12; i++) { // make the regex if we don't have it already if (!this._monthsParse[i]) { mom = moment([2000, i]); regex = '^' + this.months(mom, '') + '|^' + this.monthsShort(mom, ''); this._monthsParse[i] = new RegExp(regex.replace('.', ''), 'i'); } // test the regex if (this._monthsParse[i].test(monthName)) { return i; } } }, _weekdays : "Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday".split("_"), weekdays : function (m) { return this._weekdays[m.day()]; }, _weekdaysShort : "Sun_Mon_Tue_Wed_Thu_Fri_Sat".split("_"), weekdaysShort : function (m) { return 
this._weekdaysShort[m.day()]; }, _weekdaysMin : "Su_Mo_Tu_We_Th_Fr_Sa".split("_"), weekdaysMin : function (m) { return this._weekdaysMin[m.day()]; }, _longDateFormat : { LT : "h:mm A", L : "MM/DD/YYYY", LL : "MMMM D YYYY", LLL : "MMMM D YYYY LT", LLLL : "dddd, MMMM D YYYY LT" }, longDateFormat : function (key) { var output = this._longDateFormat[key]; if (!output && this._longDateFormat[key.toUpperCase()]) { output = this._longDateFormat[key.toUpperCase()].replace(/MMMM|MM|DD|dddd/g, function (val) { return val.slice(1); }); this._longDateFormat[key] = output; } return output; }, meridiem : function (hours, minutes, isLower) { if (hours > 11) { return isLower ? 'pm' : 'PM'; } else { return isLower ? 'am' : 'AM'; } }, _calendar : { sameDay : '[Today at] LT', nextDay : '[Tomorrow at] LT', nextWeek : 'dddd [at] LT', lastDay : '[Yesterday at] LT', lastWeek : '[last] dddd [at] LT', sameElse : 'L' }, calendar : function (key, mom) { var output = this._calendar[key]; return typeof output === 'function' ? output.apply(mom) : output; }, _relativeTime : { future : "in %s", past : "%s ago", s : "a few seconds", m : "a minute", mm : "%d minutes", h : "an hour", hh : "%d hours", d : "a day", dd : "%d days", M : "a month", MM : "%d months", y : "a year", yy : "%d years" }, relativeTime : function (number, withoutSuffix, string, isFuture) { var output = this._relativeTime[string]; return (typeof output === 'function') ? output(number, withoutSuffix, string, isFuture) : output.replace(/%d/i, number); }, pastFuture : function (diff, output) { var format = this._relativeTime[diff > 0 ? 'future' : 'past']; return typeof format === 'function' ? 
format(output) : format.replace(/%s/i, output); }, ordinal : function (number) { return this._ordinal.replace("%d", number); }, _ordinal : "%d", preparse : function (string) { return string; }, postformat : function (string) { return string; }, week : function (mom) { return weekOfYear(mom, this._week.dow, this._week.doy); }, _week : { dow : 0, // Sunday is the first day of the week. doy : 6 // The week that contains Jan 1st is the first week of the year. } }; // Loads a language definition into the `languages` cache. The function // takes a key and optionally values. If not in the browser and no values // are provided, it will load the language file module. As a convenience, // this function also returns the language values. function loadLang(key, values) { values.abbr = key; if (!languages[key]) { languages[key] = new Language(); } languages[key].set(values); return languages[key]; } // Determines which language definition to use and returns it. // // With no parameters, it will return the global language. If you // pass in a language key, such as 'en', it will return the // definition for 'en', so long as 'en' has already been loaded using // moment.lang. function getLangDefinition(key) { if (!key) { return moment.fn._lang; } if (!languages[key] && hasModule) { require('./lang/' + key); } return languages[key]; } /************************************ Formatting ************************************/ function removeFormattingTokens(input) { if (input.match(/\[.*\]/)) { return input.replace(/^\[|\]$/g, ""); } return input.replace(/\\/g, ""); } function makeFormatFunction(format) { var array = format.match(formattingTokens), i, length; for (i = 0, length = array.length; i < length; i++) { if (formatTokenFunctions[array[i]]) { array[i] = formatTokenFunctions[array[i]]; } else { array[i] = removeFormattingTokens(array[i]); } } return function (mom) { var output = ""; for (i = 0; i < length; i++) { output += typeof array[i].call === 'function' ? 
array[i].call(mom, format) : array[i]; } return output; }; } // format date using native date object function formatMoment(m, format) { var i = 5; function replaceLongDateFormatTokens(input) { return m.lang().longDateFormat(input) || input; } while (i-- && localFormattingTokens.test(format)) { format = format.replace(localFormattingTokens, replaceLongDateFormatTokens); } if (!formatFunctions[format]) { formatFunctions[format] = makeFormatFunction(format); } return formatFunctions[format](m); } /************************************ Parsing ************************************/ // get the regex to find the next token function getParseRegexForToken(token) { switch (token) { case 'DDDD': return parseTokenThreeDigits; case 'YYYY': return parseTokenFourDigits; case 'YYYYY': return parseTokenSixDigits; case 'S': case 'SS': case 'SSS': case 'DDD': return parseTokenOneToThreeDigits; case 'MMM': case 'MMMM': case 'dd': case 'ddd': case 'dddd': case 'a': case 'A': return parseTokenWord; case 'X': return parseTokenTimestampMs; case 'Z': case 'ZZ': return parseTokenTimezone; case 'T': return parseTokenT; case 'MM': case 'DD': case 'YY': case 'HH': case 'hh': case 'mm': case 'ss': case 'M': case 'D': case 'd': case 'H': case 'h': case 'm': case 's': return parseTokenOneOrTwoDigits; default : return new RegExp(token.replace('\\', '')); } } // function to convert string input to date function addTimeToArrayFromToken(token, input, config) { var a, b, datePartArray = config._a; switch (token) { // MONTH case 'M' : // fall through to MM case 'MM' : datePartArray[1] = (input == null) ? 0 : ~~input - 1; break; case 'MMM' : // fall through to MMMM case 'MMMM' : a = getLangDefinition(config._l).monthsParse(input); // if we didn't find a month name, mark the date as invalid. 
if (a != null) { datePartArray[1] = a; } else { config._isValid = false; } break; // DAY OF MONTH case 'D' : // fall through to DDDD case 'DD' : // fall through to DDDD case 'DDD' : // fall through to DDDD case 'DDDD' : if (input != null) { datePartArray[2] = ~~input; } break; // YEAR case 'YY' : datePartArray[0] = ~~input + (~~input > 68 ? 1900 : 2000); break; case 'YYYY' : case 'YYYYY' : datePartArray[0] = ~~input; break; // AM / PM case 'a' : // fall through to A case 'A' : config._isPm = ((input + '').toLowerCase() === 'pm'); break; // 24 HOUR case 'H' : // fall through to hh case 'HH' : // fall through to hh case 'h' : // fall through to hh case 'hh' : datePartArray[3] = ~~input; break; // MINUTE case 'm' : // fall through to mm case 'mm' : datePartArray[4] = ~~input; break; // SECOND case 's' : // fall through to ss case 'ss' : datePartArray[5] = ~~input; break; // MILLISECOND case 'S' : case 'SS' : case 'SSS' : datePartArray[6] = ~~ (('0.' + input) * 1000); break; // UNIX TIMESTAMP WITH MS case 'X': config._d = new Date(parseFloat(input) * 1000); break; // TIMEZONE case 'Z' : // fall through to ZZ case 'ZZ' : config._useUTC = true; a = (input + '').match(parseTimezoneChunker); if (a && a[1]) { config._tzh = ~~a[1]; } if (a && a[2]) { config._tzm = ~~a[2]; } // reverse offsets if (a && a[0] === '+') { config._tzh = -config._tzh; config._tzm = -config._tzm; } break; } // if the input is null, the date is not valid if (input == null) { config._isValid = false; } } // convert an array to a date. // the array should mirror the parameters below // note: all values past the year are optional and will default to the lowest possible value. // [year, month, day , hour, minute, second, millisecond] function dateFromArray(config) { var i, date, input = []; if (config._d) { return; } for (i = 0; i < 7; i++) { config._a[i] = input[i] = (config._a[i] == null) ? (i === 2 ? 
1 : 0) : config._a[i]; } // add the offsets to the time to be parsed so that we can have a clean array for checking isValid input[3] += config._tzh || 0; input[4] += config._tzm || 0; date = new Date(0); if (config._useUTC) { date.setUTCFullYear(input[0], input[1], input[2]); date.setUTCHours(input[3], input[4], input[5], input[6]); } else { date.setFullYear(input[0], input[1], input[2]); date.setHours(input[3], input[4], input[5], input[6]); } config._d = date; } // date from string and format string function makeDateFromStringAndFormat(config) { // This array is used to make a Date, either with `new Date` or `Date.UTC` var tokens = config._f.match(formattingTokens), string = config._i, i, parsedInput; config._a = []; for (i = 0; i < tokens.length; i++) { parsedInput = (getParseRegexForToken(tokens[i]).exec(string) || [])[0]; if (parsedInput) { string = string.slice(string.indexOf(parsedInput) + parsedInput.length); } // don't parse if its not a known token if (formatTokenFunctions[tokens[i]]) { addTimeToArrayFromToken(tokens[i], parsedInput, config); } } // handle am pm if (config._isPm && config._a[3] < 12) { config._a[3] += 12; } // if is 12 am, change hours to 0 if (config._isPm === false && config._a[3] === 12) { config._a[3] = 0; } // return dateFromArray(config); } // date from string and array of format strings function makeDateFromStringAndArray(config) { var tempConfig, tempMoment, bestMoment, scoreToBeat = 99, i, currentDate, currentScore; while (config._f.length) { tempConfig = extend({}, config); tempConfig._f = config._f.pop(); makeDateFromStringAndFormat(tempConfig); tempMoment = new Moment(tempConfig); if (tempMoment.isValid()) { bestMoment = tempMoment; break; } currentScore = compareArrays(tempConfig._a, tempMoment.toArray()); if (currentScore < scoreToBeat) { scoreToBeat = currentScore; bestMoment = tempMoment; } } extend(config, bestMoment); } // date from iso format function makeDateFromString(config) { var i, string = config._i; if 
(isoRegex.exec(string)) { config._f = 'YYYY-MM-DDT'; for (i = 0; i < 4; i++) { if (isoTimes[i][1].exec(string)) { config._f += isoTimes[i][0]; break; } } if (parseTokenTimezone.exec(string)) { config._f += " Z"; } makeDateFromStringAndFormat(config); } else { config._d = new Date(string); } } function makeDateFromInput(config) { var input = config._i, matched = aspNetJsonRegex.exec(input); if (input === undefined) { config._d = new Date(); } else if (matched) { config._d = new Date(+matched[1]); } else if (typeof input === 'string') { makeDateFromString(config); } else if (isArray(input)) { config._a = input.slice(0); dateFromArray(config); } else { config._d = input instanceof Date ? new Date(+input) : new Date(input); } } /************************************ Relative Time ************************************/ // helper function for moment.fn.from, moment.fn.fromNow, and moment.duration.fn.humanize function substituteTimeAgo(string, number, withoutSuffix, isFuture, lang) { return lang.relativeTime(number || 1, !!withoutSuffix, string, isFuture); } function relativeTime(milliseconds, withoutSuffix, lang) { var seconds = round(Math.abs(milliseconds) / 1000), minutes = round(seconds / 60), hours = round(minutes / 60), days = round(hours / 24), years = round(days / 365), args = seconds < 45 && ['s', seconds] || minutes === 1 && ['m'] || minutes < 45 && ['mm', minutes] || hours === 1 && ['h'] || hours < 22 && ['hh', hours] || days === 1 && ['d'] || days <= 25 && ['dd', days] || days <= 45 && ['M'] || days < 345 && ['MM', round(days / 30)] || years === 1 && ['y'] || ['yy', years]; args[2] = withoutSuffix; args[3] = milliseconds > 0; args[4] = lang; return substituteTimeAgo.apply({}, args); } /************************************ Week of Year ************************************/ // firstDayOfWeek 0 = sun, 6 = sat // the day of the week that starts the week // (usually sunday or monday) // firstDayOfWeekOfYear 0 = sun, 6 = sat // the first week is the week that contains 
the first // of this day of the week // (eg. ISO weeks use thursday (4)) function weekOfYear(mom, firstDayOfWeek, firstDayOfWeekOfYear) { var end = firstDayOfWeekOfYear - firstDayOfWeek, daysToDayOfWeek = firstDayOfWeekOfYear - mom.day(); if (daysToDayOfWeek > end) { daysToDayOfWeek -= 7; } if (daysToDayOfWeek < end - 7) { daysToDayOfWeek += 7; } return Math.ceil(moment(mom).add('d', daysToDayOfWeek).dayOfYear() / 7); } /************************************ Top Level Functions ************************************/ function makeMoment(config) { var input = config._i, format = config._f; if (input === null || input === '') { return null; } if (typeof input === 'string') { config._i = input = getLangDefinition().preparse(input); } if (moment.isMoment(input)) { config = extend({}, input); config._d = new Date(+input._d); } else if (format) { if (isArray(format)) { makeDateFromStringAndArray(config); } else { makeDateFromStringAndFormat(config); } } else { makeDateFromInput(config); } return new Moment(config); } moment = function (input, format, lang) { return makeMoment({ _i : input, _f : format, _l : lang, _isUTC : false }); }; // creating with utc moment.utc = function (input, format, lang) { return makeMoment({ _useUTC : true, _isUTC : true, _l : lang, _i : input, _f : format }); }; // creating with unix timestamp (in seconds) moment.unix = function (input) { return moment(input * 1000); }; // duration moment.duration = function (input, key) { var isDuration = moment.isDuration(input), isNumber = (typeof input === 'number'), duration = (isDuration ? input._data : (isNumber ? 
{} : input)), ret; if (isNumber) { if (key) { duration[key] = input; } else { duration.milliseconds = input; } } ret = new Duration(duration); if (isDuration && input.hasOwnProperty('_lang')) { ret._lang = input._lang; } return ret; }; // version number moment.version = VERSION; // default format moment.defaultFormat = isoFormat; // This function will load languages and then set the global language. If // no arguments are passed in, it will simply return the current global // language key. moment.lang = function (key, values) { var i; if (!key) { return moment.fn._lang._abbr; } if (values) { loadLang(key, values); } else if (!languages[key]) { getLangDefinition(key); } moment.duration.fn._lang = moment.fn._lang = getLangDefinition(key); }; // returns language data moment.langData = function (key) { if (key && key._lang && key._lang._abbr) { key = key._lang._abbr; } return getLangDefinition(key); }; // compare moment object moment.isMoment = function (obj) { return obj instanceof Moment; }; // for typechecking Duration objects moment.isDuration = function (obj) { return obj instanceof Duration; }; /************************************ Moment Prototype ************************************/ moment.fn = Moment.prototype = { clone : function () { return moment(this); }, valueOf : function () { return +this._d; }, unix : function () { return Math.floor(+this._d / 1000); }, toString : function () { return this.format("ddd MMM DD YYYY HH:mm:ss [GMT]ZZ"); }, toDate : function () { return this._d; }, toJSON : function () { return moment.utc(this).format('YYYY-MM-DD[T]HH:mm:ss.SSS[Z]'); }, toArray : function () { var m = this; return [ m.year(), m.month(), m.date(), m.hours(), m.minutes(), m.seconds(), m.milliseconds() ]; }, isValid : function () { if (this._isValid == null) { if (this._a) { this._isValid = !compareArrays(this._a, (this._isUTC ? 
moment.utc(this._a) : moment(this._a)).toArray()); } else { this._isValid = !isNaN(this._d.getTime()); } } return !!this._isValid; }, utc : function () { this._isUTC = true; return this; }, local : function () { this._isUTC = false; return this; }, format : function (inputString) { var output = formatMoment(this, inputString || moment.defaultFormat); return this.lang().postformat(output); }, add : function (input, val) { var dur; // switch args to support add('s', 1) and add(1, 's') if (typeof input === 'string') { dur = moment.duration(+val, input); } else { dur = moment.duration(input, val); } addOrSubtractDurationFromMoment(this, dur, 1); return this; }, subtract : function (input, val) { var dur; // switch args to support subtract('s', 1) and subtract(1, 's') if (typeof input === 'string') { dur = moment.duration(+val, input); } else { dur = moment.duration(input, val); } addOrSubtractDurationFromMoment(this, dur, -1); return this; }, diff : function (input, units, asFloat) { var that = this._isUTC ? moment(input).utc() : moment(input).local(), zoneDiff = (this.zone() - that.zone()) * 6e4, diff, output; if (units) { // standardize on singular form units = units.replace(/s$/, ''); } if (units === 'year' || units === 'month') { diff = (this.daysInMonth() + that.daysInMonth()) * 432e5; // 24 * 60 * 60 * 1000 / 2 output = ((this.year() - that.year()) * 12) + (this.month() - that.month()); output += ((this - moment(this).startOf('month')) - (that - moment(that).startOf('month'))) / diff; if (units === 'year') { output = output / 12; } } else { diff = (this - that) - zoneDiff; output = units === 'second' ? diff / 1e3 : // 1000 units === 'minute' ? diff / 6e4 : // 1000 * 60 units === 'hour' ? diff / 36e5 : // 1000 * 60 * 60 units === 'day' ? diff / 864e5 : // 1000 * 60 * 60 * 24 units === 'week' ? diff / 6048e5 : // 1000 * 60 * 60 * 24 * 7 diff; } return asFloat ? 
output : absRound(output); }, from : function (time, withoutSuffix) { return moment.duration(this.diff(time)).lang(this.lang()._abbr).humanize(!withoutSuffix); }, fromNow : function (withoutSuffix) { return this.from(moment(), withoutSuffix); }, calendar : function () { var diff = this.diff(moment().startOf('day'), 'days', true), format = diff < -6 ? 'sameElse' : diff < -1 ? 'lastWeek' : diff < 0 ? 'lastDay' : diff < 1 ? 'sameDay' : diff < 2 ? 'nextDay' : diff < 7 ? 'nextWeek' : 'sameElse'; return this.format(this.lang().calendar(format, this)); }, isLeapYear : function () { var year = this.year(); return (year % 4 === 0 && year % 100 !== 0) || year % 400 === 0; }, isDST : function () { return (this.zone() < moment([this.year()]).zone() || this.zone() < moment([this.year(), 5]).zone()); }, day : function (input) { var day = this._isUTC ? this._d.getUTCDay() : this._d.getDay(); return input == null ? day : this.add({ d : input - day }); }, startOf: function (units) { units = units.replace(/s$/, ''); // the following switch intentionally omits break keywords // to utilize falling through the cases. switch (units) { case 'year': this.month(0); /* falls through */ case 'month': this.date(1); /* falls through */ case 'week': case 'day': this.hours(0); /* falls through */ case 'hour': this.minutes(0); /* falls through */ case 'minute': this.seconds(0); /* falls through */ case 'second': this.milliseconds(0); /* falls through */ } // weeks are a special case if (units === 'week') { this.day(0); } return this; }, endOf: function (units) { return this.startOf(units).add(units.replace(/s?$/, 's'), 1).subtract('ms', 1); }, isAfter: function (input, units) { units = typeof units !== 'undefined' ? units : 'millisecond'; return +this.clone().startOf(units) > +moment(input).startOf(units); }, isBefore: function (input, units) { units = typeof units !== 'undefined' ? 
units : 'millisecond'; return +this.clone().startOf(units) < +moment(input).startOf(units); }, isSame: function (input, units) { units = typeof units !== 'undefined' ? units : 'millisecond'; return +this.clone().startOf(units) === +moment(input).startOf(units); }, zone : function () { return this._isUTC ? 0 : this._d.getTimezoneOffset(); }, daysInMonth : function () { return moment.utc([this.year(), this.month() + 1, 0]).date(); }, dayOfYear : function (input) { var dayOfYear = round((moment(this).startOf('day') - moment(this).startOf('year')) / 864e5) + 1; return input == null ? dayOfYear : this.add("d", (input - dayOfYear)); }, isoWeek : function (input) { var week = weekOfYear(this, 1, 4); return input == null ? week : this.add("d", (input - week) * 7); }, week : function (input) { var week = this.lang().week(this); return input == null ? week : this.add("d", (input - week) * 7); }, // If passed a language key, it will set the language for this // instance. Otherwise, it will return the language configuration // variables for this instance. lang : function (key) { if (key === undefined) { return this._lang; } else { this._lang = getLangDefinition(key); return this; } } }; // helper for adding shortcuts function makeGetterAndSetter(name, key) { moment.fn[name] = moment.fn[name + 's'] = function (input) { var utc = this._isUTC ? 
'UTC' : ''; if (input != null) { this._d['set' + utc + key](input); return this; } else { return this._d['get' + utc + key](); } }; } // loop through and add shortcuts (Month, Date, Hours, Minutes, Seconds, Milliseconds) for (i = 0; i < proxyGettersAndSetters.length; i ++) { makeGetterAndSetter(proxyGettersAndSetters[i].toLowerCase().replace(/s$/, ''), proxyGettersAndSetters[i]); } // add shortcut for year (uses different syntax than the getter/setter 'year' == 'FullYear') makeGetterAndSetter('year', 'FullYear'); // add plural methods moment.fn.days = moment.fn.day; moment.fn.weeks = moment.fn.week; moment.fn.isoWeeks = moment.fn.isoWeek; /************************************ Duration Prototype ************************************/ moment.duration.fn = Duration.prototype = { weeks : function () { return absRound(this.days() / 7); }, valueOf : function () { return this._milliseconds + this._days * 864e5 + this._months * 2592e6; }, humanize : function (withSuffix) { var difference = +this, output = relativeTime(difference, !withSuffix, this.lang()); if (withSuffix) { output = this.lang().pastFuture(difference, output); } return this.lang().postformat(output); }, lang : moment.fn.lang }; function makeDurationGetter(name) { moment.duration.fn[name] = function () { return this._data[name]; }; } function makeDurationAsGetter(name, factor) { moment.duration.fn['as' + name] = function () { return +this / factor; }; } for (i in unitMillisecondFactors) { if (unitMillisecondFactors.hasOwnProperty(i)) { makeDurationAsGetter(i, unitMillisecondFactors[i]); makeDurationGetter(i.toLowerCase()); } } makeDurationAsGetter('Weeks', 6048e5); /************************************ Default Lang ************************************/ // Set default language, other languages will inherit from English. moment.lang('en', { ordinal : function (number) { var b = number % 10, output = (~~ (number % 100 / 10) === 1) ? 'th' : (b === 1) ? 'st' : (b === 2) ? 'nd' : (b === 3) ? 
'rd' : 'th'; return number + output; } }); /************************************ Exposing Moment ************************************/ // CommonJS module is defined if (hasModule) { module.exports = moment; } /*global ender:false */ if (typeof ender === 'undefined') { // here, `this` means `window` in the browser, or `global` on the server // add `moment` as a global object via a string identifier, // for Closure Compiler "advanced" mode this['moment'] = moment; } /*global define:false */ if (typeof define === "function" && define.amd) { define("moment", [], function () { return moment; }); } }).call(this); ================================================ FILE: vendor/assets/stylesheets/tablesorter.theme.kochiku.css ================================================ /************* Kochiku table sorter Theme Ref: http://mottie.github.io/tablesorter/ version 2.8.13 Copied from the tablesorter default theme and deleted everything but header section Removed font, borter-bottom styling. *************/ /* header */ .tablesorter-default th, .tablesorter-default thead td { color: #000; background-color: #fff; border-collapse: collapse; padding: 0; } .tablesorter-default tfoot th, .tablesorter-default tfoot td { border: 0; } .tablesorter-default .header, .tablesorter-default .tablesorter-header { background-image: url(data:image/gif;base64,R0lGODlhFQAJAIAAACMtMP///yH5BAEAAAEALAAAAAAVAAkAAAIXjI+AywnaYnhUMoqt3gZXPmVg94yJVQAAOw==); background-position: center right; background-repeat: no-repeat; cursor: pointer; white-space: normal; padding: 4px 20px 4px 4px; } .tablesorter-default thead .headerSortUp, .tablesorter-default thead .tablesorter-headerSortUp, .tablesorter-default thead .tablesorter-headerAsc { background-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAACMtMP///yH5BAEAAAEALAAAAAAVAAQAAAINjI8Bya2wnINUMopZAQA7); } .tablesorter-default thead .headerSortDown, .tablesorter-default thead .tablesorter-headerSortDown, .tablesorter-default thead .tablesorter-headerDesc { 
background-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAACMtMP///yH5BAEAAAEALAAAAAAVAAQAAAINjB+gC+jP2ptn0WskLQA7); } .tablesorter-default thead .sorter-false { background-image: none; cursor: default; padding: 4px; } ================================================ FILE: vendor/assets/stylesheets/tipTip.scss ================================================ /* TipTip CSS - Version 1.2 */ #tiptip_holder { display: none; position: absolute; top: 0; left: 0; z-index: 99999; } #tiptip_holder.tip_top { padding-bottom: 5px; } #tiptip_holder.tip_bottom { padding-top: 5px; } #tiptip_holder.tip_right { padding-left: 5px; } #tiptip_holder.tip_left { padding-right: 5px; } #tiptip_content { font-size: 11px; color: #fff; text-shadow: 0 0 2px #000; padding: 4px 8px; border: 1px solid rgba(255,255,255,0.25); background-color: rgb(25,25,25); background-color: rgba(25,25,25,0.92); background-image: -webkit-gradient(linear, 0% 0%, 0% 100%, from(transparent), to(#000)); border-radius: 3px; -webkit-border-radius: 3px; -moz-border-radius: 3px; box-shadow: 0 0 3px #555; -webkit-box-shadow: 0 0 3px #555; -moz-box-shadow: 0 0 3px #555; } #tiptip_arrow, #tiptip_arrow_inner { position: absolute; border-color: transparent; border-style: solid; border-width: 6px; height: 0; width: 0; } #tiptip_holder.tip_top #tiptip_arrow { border-top-color: #fff; border-top-color: rgba(255,255,255,0.35); } #tiptip_holder.tip_bottom #tiptip_arrow { border-bottom-color: #fff; border-bottom-color: rgba(255,255,255,0.35); } #tiptip_holder.tip_right #tiptip_arrow { border-right-color: #fff; border-right-color: rgba(255,255,255,0.35); } #tiptip_holder.tip_left #tiptip_arrow { border-left-color: #fff; border-left-color: rgba(255,255,255,0.35); } #tiptip_holder.tip_top #tiptip_arrow_inner { margin-top: -7px; margin-left: -6px; border-top-color: rgb(25,25,25); border-top-color: rgba(25,25,25,0.92); } #tiptip_holder.tip_bottom #tiptip_arrow_inner { margin-top: -5px; margin-left: -6px; border-bottom-color: 
rgb(25,25,25); border-bottom-color: rgba(25,25,25,0.92); } #tiptip_holder.tip_right #tiptip_arrow_inner { margin-top: -6px; margin-left: -5px; border-right-color: rgb(25,25,25); border-right-color: rgba(25,25,25,0.92); } #tiptip_holder.tip_left #tiptip_arrow_inner { margin-top: -6px; margin-left: -7px; border-left-color: rgb(25,25,25); border-left-color: rgba(25,25,25,0.92); } /* Webkit Hacks */ @media screen and (-webkit-min-device-pixel-ratio:0) { #tiptip_content { padding: 4px 8px 5px 8px; background-color: rgba(45,45,45,0.88); } #tiptip_holder.tip_bottom #tiptip_arrow_inner { border-bottom-color: rgba(45,45,45,0.88); } #tiptip_holder.tip_top #tiptip_arrow_inner { border-top-color: rgba(20,20,20,0.92); } }