[
  {
    "path": ".gitignore",
    "content": "/.bundle\nlog/*.log\ntmp/\npublic/uploads\npublic/log_files\nvendor/ruby\n\n# file with the real db config for production\n/config/database.production.yml\n\n# rspec\n/spec/examples.txt\n"
  },
  {
    "path": ".haml-lint.yml",
    "content": "linters:\n  ConsecutiveSilentScripts:\n    enabled: false\n\n  IdNames:\n    enabled: false\n\n  InlineStyles:\n    enabled: false\n\n  InstanceVariables:\n    enabled: false\n\n  ImplicitDiv:\n    enabled: false\n\n  LineLength:\n    enabled: false\n\n  RuboCop:\n    enabled: false\n\n  SpaceInsideHashAttributes:\n    style: no_space\n\n  ViewLength:\n    enabled: false\n"
  },
  {
    "path": ".rspec",
    "content": "--color\n\n"
  },
  {
    "path": ".rubocop.yml",
    "content": "inherit_from: .rubocop_todo.yml\nAllCops:\n  TargetRubyVersion: 2.3\n  TargetRailsVersion: 4.2\n  DisplayCopNames: true\n  Include:\n    - 'Capfile'\n    - 'Gemfile'\n    - 'Rakefile'\n    - 'config.ru'\n  Exclude:\n    - 'config/deploy.rb'\n    - 'db/migrate/*'\n    - 'db/schema.rb'\n    - 'lib/ext/activerecord/persistence.rb'\n    - 'vendor/**/*'\n\nLayout/EmptyLinesAroundBlockBody:\n  Enabled: false\n\nLayout/EmptyLinesAroundClassBody:\n  Enabled: false\n\nLayout/ExtraSpacing:\n  Enabled: false\n\nLayout/MultilineBlockLayout:\n  Exclude:\n    - 'spec/lib/partitioner/maven_spec.rb'\n\nLint/HandleExceptions:\n  Exclude:\n    - 'lib/git_repo.rb'\n\nLint/NestedMethodDefinition:\n  Exclude:\n    - 'app/jobs/enforce_timeouts_job.rb'\n\nLint/UselessAssignment:\n  Exclude:\n    - 'config/compass.rb'\n    - 'spec/models/build_spec.rb'\n\nMetrics/MethodLength:\n  Exclude:\n    - 'lib/partitioner/pants.rb'\n\nMetrics/ClassLength:\n  Enabled: false\n\nRails:\n  Enabled: true\n\nStyle/BlockDelimiters:\n  Enabled: false\n\nStyle/DoubleNegation:\n  Enabled: false\n\nStyle/FrozenStringLiteralComment:\n  # will enable this after Ruby 3.0 is released\n  Enabled: false\n\nStyle/GuardClause:\n  # Personally disagree with this one. In certain situations not using a\n  # GuardClause makes it more readable.\n  Enabled: false\n\nStyle/IfUnlessModifier:\n  Enabled: false\n\nStyle/MultilineBlockChain:\n  Enabled: false\n\nStyle/TrailingCommaInLiteral:\n  Enabled: false\n"
  },
  {
    "path": ".rubocop_todo.yml",
    "content": "# This configuration was generated by\n# `rubocop --auto-gen-config`\n# on 2017-12-08 16:49:35 -0800 using RuboCop version 0.51.0.\n# The point is for the user to remove these configuration records\n# one by one as the offenses are removed from the code base.\n# Note that changes in the inspected code, or installation of new\n# versions of RuboCop, may require this file to be generated again.\n\n# Offense count: 3\n# Cop supports --auto-correct.\nLayout/EmptyLineAfterMagicComment:\n  Exclude:\n    - 'lib/github_post_receive_hook.rb'\n    - 'lib/partitioner/maven.rb'\n    - 'spec/features/integration_spec.rb'\n\n# Offense count: 2\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles, IndentationWidth.\n# SupportedStyles: special_inside_parentheses, consistent, align_braces\nLayout/IndentHash:\n  Exclude:\n    - 'app/views/branches/status_report.xml.builder'\n\n# Offense count: 8\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles.\n# SupportedStyles: auto_detection, squiggly, active_support, powerpack, unindent\nLayout/IndentHeredoc:\n  Exclude:\n    - 'spec/features/integration_spec.rb'\n    - 'spec/lib/git_blame_spec.rb'\n    - 'spec/lib/partitioner/maven_spec.rb'\n\n# Offense count: 11\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles.\n# SupportedStyles: symmetrical, new_line, same_line\nLayout/MultilineMethodCallBraceLayout:\n  Exclude:\n    - 'app/controllers/build_attempts_controller.rb'\n    - 'spec/controllers/pull_requests_controller_spec.rb'\n    - 'spec/jobs/build_partitioning_job_spec.rb'\n    - 'spec/jobs/build_state_update_job_spec.rb'\n    - 'spec/lib/github_commit_status_spec.rb'\n\n# Offense count: 11\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles, IndentationWidth.\n# SupportedStyles: aligned, indented, 
indented_relative_to_receiver\nLayout/MultilineMethodCallIndentation:\n  Exclude:\n    - 'spec/lib/build_strategies/production_build_strategy_spec.rb'\n    - 'spec/lib/git_merge_executor_spec.rb'\n    - 'spec/lib/partitioner/maven_spec.rb'\n\n# Offense count: 10\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles, SupportedStylesForEmptyBraces.\n# SupportedStyles: space, no_space\n# SupportedStylesForEmptyBraces: space, no_space\nLayout/SpaceBeforeBlockBraces:\n  Exclude:\n    - 'config/application.rb'\n    - 'spec/controllers/branches_controller_spec.rb'\n    - 'spec/controllers/build_artifacts_controller_spec.rb'\n    - 'spec/controllers/repositories_controller_spec.rb'\n    - 'spec/jobs/poll_repositories_job_spec.rb'\n    - 'spec/lib/remote_server_spec.rb'\n    - 'spec/models/build_spec.rb'\n\n# Offense count: 245\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles, EnforcedStyleForEmptyBraces, SupportedStylesForEmptyBraces.\n# SupportedStyles: space, no_space, compact\n# SupportedStylesForEmptyBraces: space, no_space\nLayout/SpaceInsideHashLiteralBraces:\n  Enabled: false\n\n# Offense count: 2\n# Cop supports --auto-correct.\nLayout/SpaceInsidePercentLiteralDelimiters:\n  Exclude:\n    - 'spec/models/repository_spec.rb'\n\n# Offense count: 5\nLint/AmbiguousBlockAssociation:\n  Exclude:\n    - 'spec/controllers/branches_controller_spec.rb'\n    - 'spec/controllers/build_parts_controller_spec.rb'\n    - 'spec/jobs/build_state_update_job_spec.rb'\n    - 'spec/jobs/enforce_timeouts_job_spec.rb'\n    - 'spec/jobs/poll_repositories_job_spec.rb'\n\n# Offense count: 1\n# Configuration parameters: AllowSafeAssignment.\nLint/AssignmentInCondition:\n  Exclude:\n    - 'app/decorators/branch_decorator.rb'\n\n# Offense count: 3\nLint/RescueWithoutErrorClass:\n  Exclude:\n    - 'app/controllers/build_attempts_controller.rb'\n    - 'app/jobs/job_base.rb'\n    - 'lib/remote_server/stash.rb'\n\n# 
Offense count: 6\n# Cop supports --auto-correct.\n# Configuration parameters: IgnoreEmptyBlocks, AllowUnusedKeywordArguments.\nLint/UnusedBlockArgument:\n  Exclude:\n    - 'app/controllers/application_controller.rb'\n    - 'spec/lib/git_blame_spec.rb'\n    - 'spec/models/build_part_spec.rb'\n    - 'spec/support/custom_argument_matchers.rb'\n    - 'spec/support/factories.rb'\n\n# Offense count: 10\n# Cop supports --auto-correct.\n# Configuration parameters: AllowUnusedKeywordArguments, IgnoreEmptyMethods.\nLint/UnusedMethodArgument:\n  Exclude:\n    - 'app/decorators/branch_decorator.rb'\n    - 'app/helpers/build_helper.rb'\n    - 'app/jobs/build_attempt_job.rb'\n    - 'lib/partitioner/default.rb'\n    - 'lib/remote_server/stash.rb'\n    - 'spec/mailers/previews/build_mailer_preview.rb'\n\n# Offense count: 48\nMetrics/AbcSize:\n  Max: 58\n\n# Offense count: 128\n# Configuration parameters: CountComments, ExcludedMethods.\nMetrics/BlockLength:\n  Max: 550\n\n# Offense count: 7\n# Configuration parameters: CountComments.\nMetrics/ClassLength:\n  Max: 232\n\n# Offense count: 8\nMetrics/CyclomaticComplexity:\n  Max: 17\n\n# Offense count: 1110\n# Configuration parameters: AllowHeredoc, AllowURI, URISchemes, IgnoreCopDirectives, IgnoredPatterns.\n# URISchemes: http, https\nMetrics/LineLength:\n  Max: 562\n\n# Offense count: 58\n# Configuration parameters: CountComments.\nMetrics/MethodLength:\n  Max: 46\n\n# Offense count: 7\nMetrics/PerceivedComplexity:\n  Max: 19\n\n# Offense count: 1\n# Configuration parameters: Blacklist.\n# Blacklist: END, (?-mix:EO[A-Z]{1})\nNaming/HeredocDelimiterNaming:\n  Exclude:\n    - 'app/models/build.rb'\n\n# Offense count: 2\n# Configuration parameters: NamePrefix, NamePrefixBlacklist, NameWhitelist, MethodDefinitionMacros.\n# NamePrefix: is_, has_, have_\n# NamePrefixBlacklist: is_, has_, have_\n# NameWhitelist: is_a?\n# MethodDefinitionMacros: define_method, define_singleton_method\nNaming/PredicateName:\n  Exclude:\n    - 'spec/**/*'\n  
  - 'app/helpers/build_helper.rb'\n    - 'app/models/build.rb'\n\n# Offense count: 10\n# Configuration parameters: EnforcedStyle, SupportedStyles.\n# SupportedStyles: snake_case, camelCase\nNaming/VariableName:\n  Exclude:\n    - 'lib/remote_server/stash.rb'\n    - 'spec/controllers/repositories_controller_spec.rb'\n    - 'spec/models/repository_spec.rb'\n\n# Offense count: 18\n# Configuration parameters: EnforcedStyle, SupportedStyles.\n# SupportedStyles: snake_case, normalcase, non_integer\nNaming/VariableNumber:\n  Exclude:\n    - 'spec/controllers/builds_controller_spec.rb'\n    - 'spec/models/build_spec.rb'\n\n# Offense count: 3\n# Cop supports --auto-correct.\nPerformance/RegexpMatch:\n  Exclude:\n    - 'lib/git_blame.rb'\n    - 'lib/remote_server/github.rb'\n    - 'lib/remote_server/stash.rb'\n\n# Offense count: 4\n# Cop supports --auto-correct.\n# Configuration parameters: NilOrEmpty, NotPresent, UnlessPresent.\nRails/Blank:\n  Exclude:\n    - 'app/jobs/build_state_update_job.rb'\n    - 'app/models/repository.rb'\n    - 'lib/git_blame.rb'\n\n# Offense count: 1\n# Cop supports --auto-correct.\n# Configuration parameters: Whitelist.\n# Whitelist: find_by_sql\nRails/DynamicFindBy:\n  Exclude:\n    - 'spec/models/build_spec.rb'\n\n# Offense count: 4\nRails/FilePath:\n  Exclude:\n    - 'app/controllers/status_controller.rb'\n    - 'db/seeds.rb'\n    - 'spec/lib/build_strategies/production_build_strategy_spec.rb'\n    - 'spec/spec_helper.rb'\n\n# Offense count: 2\nRails/OutputSafety:\n  Exclude:\n    - 'app/helpers/build_helper.rb'\n    - 'app/helpers/mail_helper.rb'\n\n# Offense count: 18\n# Configuration parameters: Blacklist.\n# Blacklist: decrement!, decrement_counter, increment!, increment_counter, toggle!, touch, update_all, update_attribute, update_column, update_columns, update_counters\nRails/SkipsModelValidations:\n  Exclude:\n    - 'app/models/build.rb'\n    - 'app/models/build_attempt.rb'\n    - 'app/models/build_part.rb'\n    - 
'spec/controllers/pull_requests_controller_spec.rb'\n    - 'spec/features/integration_spec.rb'\n    - 'spec/helpers/project_stats_helper_spec.rb'\n    - 'spec/mailers/build_mailer_spec.rb'\n    - 'spec/models/build_part_spec.rb'\n    - 'spec/models/build_spec.rb'\n\n# Offense count: 2\n# Configuration parameters: Environments.\n# Environments: development, test, production\nRails/UnknownEnv:\n  Exclude:\n    - 'app/models/repository_observer.rb'\n    - 'config/initializers/readthis.rb'\n\n# Offense count: 2\n# Cop supports --auto-correct.\nSecurity/YAMLLoad:\n  Exclude:\n    - 'lib/git_repo.rb'\n    - 'lib/settings_accessor.rb'\n\n# Offense count: 92\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles.\n# SupportedStyles: braces, no_braces, context_dependent\nStyle/BracesAroundHashParameters:\n  Exclude:\n    - 'app/views/branches/show.rss.builder'\n    - 'app/views/branches/status_report.xml.builder'\n    - 'db/seeds.rb'\n    - 'lib/git_repo.rb'\n    - 'lib/remote_server/stash.rb'\n    - 'spec/controllers/pull_requests_controller_spec.rb'\n    - 'spec/lib/git_blame_spec.rb'\n    - 'spec/lib/partitioner/maven_spec.rb'\n    - 'spec/lib/partitioner/shared_default_behavior.rb'\n    - 'spec/mailers/build_mailer_spec.rb'\n    - 'spec/models/build_part_spec.rb'\n    - 'spec/models/build_spec.rb'\n\n# Offense count: 1\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles, SingleLineConditionsOnly, IncludeTernaryExpressions.\n# SupportedStyles: assign_to_condition, assign_inside_condition\nStyle/ConditionalAssignment:\n  Exclude:\n    - 'app/helpers/build_helper.rb'\n\n# Offense count: 52\nStyle/Documentation:\n  Enabled: false\n\n# Offense count: 1\n# Cop supports --auto-correct.\nStyle/EmptyCaseCondition:\n  Exclude:\n    - 'app/models/build.rb'\n\n# Offense count: 4\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles.\n# SupportedStyles: compact, 
expanded\nStyle/EmptyMethod:\n  Exclude:\n    - 'app/jobs/build_attempt_job.rb'\n    - 'lib/build_strategies/no_op_build_strategy.rb'\n\n# Offense count: 1\n# Cop supports --auto-correct.\nStyle/Encoding:\n  Exclude:\n    - 'spec/features/integration_spec.rb'\n\n# Offense count: 1\n# Configuration parameters: SupportedStyles.\n# SupportedStyles: annotated, template\nStyle/FormatStringToken:\n  EnforcedStyle: template\n\n# Offense count: 738\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles, UseHashRocketsWithSymbolValues, PreferHashRocketsForNonAlnumEndingSymbols.\n# SupportedStyles: ruby19, hash_rockets, no_mixed_keys, ruby19_no_mixed_keys\nStyle/HashSyntax:\n  Enabled: false\n\n# Offense count: 1\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles.\n# SupportedStyles: keyword, braces\nStyle/MultilineMemoization:\n  Exclude:\n    - 'lib/partitioner/dependency_map.rb'\n\n# Offense count: 2\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, MinBodyLength, SupportedStyles.\n# SupportedStyles: skip_modifier_ifs, always\nStyle/Next:\n  Exclude:\n    - 'app/jobs/enforce_timeouts_job.rb'\n    - 'lib/tasks/kochiku.rake'\n\n# Offense count: 16\n# Cop supports --auto-correct.\n# Configuration parameters: Strict.\nStyle/NumericLiterals:\n  MinDigits: 11\n\n# Offense count: 6\n# Cop supports --auto-correct.\n# Configuration parameters: AutoCorrect, EnforcedStyle, SupportedStyles.\n# SupportedStyles: predicate, comparison\nStyle/NumericPredicate:\n  Exclude:\n    - 'spec/**/*'\n    - 'app/models/build.rb'\n    - 'lib/git_repo.rb'\n    - 'lib/partitioner/dependency_map.rb'\n\n# Offense count: 1\n# Cop supports --auto-correct.\nStyle/ParallelAssignment:\n  Exclude:\n    - 'app/helpers/project_stats_helper.rb'\n\n# Offense count: 68\n# Cop supports --auto-correct.\n# Configuration parameters: PreferredDelimiters.\nStyle/PercentLiteralDelimiters:\n  Exclude:\n    - 
'config/deploy/production.rb'\n    - 'db/seeds.rb'\n    - 'lib/partitioner/dependency_map.rb'\n    - 'spec/controllers/branches_controller_spec.rb'\n    - 'spec/helpers/application_helper_spec.rb'\n    - 'spec/jobs/build_state_update_job_spec.rb'\n    - 'spec/lib/github_commit_status_spec.rb'\n    - 'spec/lib/partitioner/dependency_map_spec.rb'\n    - 'spec/lib/partitioner/maven_spec.rb'\n    - 'spec/lib/partitioner/shared_default_behavior.rb'\n    - 'spec/lib/settings_accessor_spec.rb'\n    - 'spec/models/build_spec.rb'\n    - 'spec/models/repository_spec.rb'\n\n# Offense count: 3\n# Cop supports --auto-correct.\nStyle/Proc:\n  Exclude:\n    - 'app/mailers/build_mailer.rb'\n    - 'app/mailers/merge_mailer.rb'\n    - 'config/application.rb'\n\n# Offense count: 2\n# Cop supports --auto-correct.\nStyle/RedundantBegin:\n  Exclude:\n    - 'lib/git_merge_executor.rb'\n    - 'lib/github_post_receive_hook.rb'\n\n# Offense count: 4\n# Cop supports --auto-correct.\n# Configuration parameters: AllowMultipleReturnValues.\nStyle/RedundantReturn:\n  Exclude:\n    - 'app/models/branch.rb'\n    - 'lib/partitioner/default.rb'\n    - 'lib/remote_server/stash.rb'\n\n# Offense count: 12\n# Cop supports --auto-correct.\nStyle/RedundantSelf:\n  Exclude:\n    - 'app/models/branch.rb'\n    - 'app/models/build.rb'\n    - 'app/models/repository.rb'\n\n# Offense count: 4\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles, AllowInnerSlashes.\n# SupportedStyles: slashes, percent_r, mixed\nStyle/RegexpLiteral:\n  Exclude:\n    - 'app/controllers/repositories_controller.rb'\n    - 'app/helpers/build_helper.rb'\n    - 'config/routes.rb'\n    - 'spec/models/build_spec.rb'\n\n# Offense count: 5\n# Cop supports --auto-correct.\n# Configuration parameters: ConvertCodeThatCanStartToReturnNil.\nStyle/SafeNavigation:\n  Exclude:\n    - 'app/models/build.rb'\n    - 'app/models/build_attempt.rb'\n    - 'app/models/build_part.rb'\n    - 
'lib/settings_accessor.rb'\n\n# Offense count: 2\n# Cop supports --auto-correct.\nStyle/SelfAssignment:\n  Exclude:\n    - 'lib/git_blame.rb'\n    - 'lib/partitioner/default.rb'\n\n# Offense count: 1906\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles, ConsistentQuotesInMultiline.\n# SupportedStyles: single_quotes, double_quotes\nStyle/StringLiterals:\n  Enabled: false\n\n# Offense count: 2\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles.\n# SupportedStyles: single_quotes, double_quotes\nStyle/StringLiteralsInInterpolation:\n  Exclude:\n    - 'lib/remote_server/stash.rb'\n\n# Offense count: 14\n# Cop supports --auto-correct.\n# Configuration parameters: MinSize, SupportedStyles.\n# SupportedStyles: percent, brackets\nStyle/SymbolArray:\n  EnforcedStyle: brackets\n\n# Offense count: 6\n# Cop supports --auto-correct.\n# Configuration parameters: IgnoredMethods.\n# IgnoredMethods: respond_to, define_method\nStyle/SymbolProc:\n  Exclude:\n    - 'app/controllers/dashboards_controller.rb'\n    - 'app/helpers/project_stats_helper.rb'\n    - 'app/models/branch.rb'\n    - 'app/models/build.rb'\n    - 'db/seeds.rb'\n    - 'lib/partitioner/maven.rb'\n\n# Offense count: 2\n# Cop supports --auto-correct.\n# Configuration parameters: EnforcedStyle, SupportedStyles, AllowSafeAssignment.\n# SupportedStyles: require_parentheses, require_no_parentheses, require_parentheses_when_complex\nStyle/TernaryParentheses:\n  Exclude:\n    - 'app/models/build.rb'\n\n# Offense count: 1\n# Cop supports --auto-correct.\n# Configuration parameters: AllowNamedUnderscoreVariables.\nStyle/TrailingUnderscoreVariable:\n  Exclude:\n    - 'app/mailers/build_mailer.rb'\n\n# Offense count: 18\n# Cop supports --auto-correct.\n# Configuration parameters: SupportedStyles, WordRegex.\n# SupportedStyles: percent, brackets\nStyle/WordArray:\n  EnforcedStyle: percent\n  MinSize: 3\n"
  },
  {
    "path": ".ruby-version",
    "content": "2.4.3\n"
  },
  {
    "path": ".travis.yml",
    "content": "sudo: false\ncache: bundler\nlanguage: ruby\nrvm:\n  - 2.4.4\n  - 2.5.1\nbefore_install:\n  # some ruby versions come with a broken version of rubygems, update to\n  # consistent version\n  - gem update --system 2.7.6\n  - gem install bundler -v '>= 1.16.1'\nscript:\n  - RAILS_ENV=test bundle exec rake --trace db:create db:migrate\n  - bundle exec rspec\n  - bundle exec rubocop\n  - bundle exec haml-lint app/views/\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "Contributing\n============\n\nIf you would like to contribute code to Kochiku, thank you! You can do so through\nGitHub by forking the repository and sending a pull request. However,\nbefore your code can be accepted into the project we need you to sign Square's (super\nsimple) [Individual Contributor License Agreement (CLA)][1].\n\n[1]: https://spreadsheets.google.com/spreadsheet/viewform?formkey=dDViT2xzUHAwRkI3X3k5Z0lQM091OGc6MQ&ndplr=1\n"
  },
  {
    "path": "Capfile",
    "content": "# Load DSL and Setup Up Stages\nrequire 'capistrano/setup'\n\n# Includes default deployment tasks\nrequire 'capistrano/deploy'\n\n# Includes tasks from other gems included in your Gemfile\nrequire 'capistrano/bundler'\nrequire 'capistrano/rails'\n\n# If you would like to use a Ruby version manager with kochiku\n# require it from a .cap file in lib/capistrano/tasks/.\n#\n# For more information see:\n# http://capistranorb.com/documentation/frameworks/rbenv-rvm-chruby/\n\n# Loads custom tasks from `lib/capistrano/tasks' if you have any defined.\nDir.glob('lib/capistrano/tasks/*.cap').sort.each { |r| import r }\n"
  },
  {
    "path": "Gemfile",
    "content": "source 'https://rubygems.org'\n\ngem 'actionpack-action_caching', '> 1.1.1'\n\ngem 'activemodel-serializers-xml' # required for xml serialization\ngem 'rails-controller-testing' # Included to provide 'assigns' method\n\ngem 'dynamic_form'\ngem 'passenger', '~> 4.0.41', group: :production\ngem 'rails', '~> 5.1.0'\ngem 'rails-observers'\n\ngem 'carrierwave'\ngem 'draper', '~> 3.0.1'\ngem 'mysql2', '>= 0.4.4'\n\ngem 'compass-rails'\ngem 'haml-rails'\ngem 'jquery-rails'\ngem 'sass-rails'\ngem 'uglifier'\n\n# therubyracer is a JS runtime required by execjs, which is in turn required\n# by uglifier. therubyracer is not the fastest option but it is the most portable.\ngem 'therubyracer'\n\ngem 'hiredis' # better Redis performance for usage as cache\ngem 'readthis'\ngem 'redis', require: [\"redis\", \"redis/connection/hiredis\"]\n\ngem 'resque', '~> 1.27.4'\ngem 'resque-retry'\ngem 'resque-scheduler', require: false\n\ngem 'json' # used by resque\n\ngem 'awesome_print', require: false\ngem 'chunky_png'\ngem 'cocaine'\ngem 'nokogiri', '~> 1.8', '>= 1.8.4' # 1.8.3 and below have known vulnerabilities\ngem 'posix-spawn'  # used by cocaine\n\ngem 'pry-byebug'\ngem 'pry-rails'\n\ngroup :test, :development do\n  gem 'factory_bot_rails'\n  gem 'haml_lint', require: false\n  gem 'rspec-collection_matchers'\n  gem 'rspec-rails', '~> 3.0'\n  gem 'rubocop', require: false\nend\n\ngroup :development do\n  gem 'bullet'\n  gem 'capistrano', '~> 3.0', require: false\n  gem 'capistrano-bundler', '~> 1.1', require: false\n  gem 'capistrano-rails', '~> 1.1', require: false\n  gem 'capistrano-rvm', '~> 0.1', require: false\n  gem 'rails-erd'\n  gem 'thin'\nend\n\ngroup :test do\n  gem 'capybara', '~> 2.3'\n  gem 'fakeredis', :require => \"fakeredis/rspec\"\n  gem 'webmock', require: false\nend\n"
  },
  {
    "path": "LICENSE.txt",
    "content": "\n                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "README.md",
    "content": "Kochiku - Distributed tests made easy\n=====================================\n\nKochiku is a distributed platform for test automation. It has three main components:\n\n- A **web server**, which lets you inspect builds and manage repositories\n- **Background jobs** that divide builds into distributable parts\n- **Workers** that run individual parts of a build\n\nA single machine typically runs the web server and background jobs, whereas many machines run workers.\n\nUse Kochiku to distribute large test suites quickly and easily. It's language agnostic; Use it for Ruby, Rails, Node.js, Ember, Java, C, C++ or anything else that runs in a unix environment.\n\n### Git integration\n\nKochiku currently integrates with git repositories stored in Github (including Github Enterprise) or Atlassian Bitbucket (formerly known as Stash). This lets Kochiku automatically run test suites for pull requests and commits to the master branch. Kochiku can also build any git revision on request.\n\nSupport for headless git servers is coming soon.\n\n## User Guide\n\n- [Installation & Deployment](https://github.com/square/kochiku/wiki/Installation-&-Deployment)\n- [Adding a repository](https://github.com/square/kochiku/wiki/How-to-add-a-repository-to-Kochiku)\n- [Initiating a build](https://github.com/square/kochiku/wiki/How-to-initiate-a-build-on-Kochiku)\n- [Hacking on Kochiku](https://github.com/square/kochiku/wiki/Hacking-on-Kochiku)\n- [Changelog](https://github.com/square/kochiku/wiki/CHANGELOG)\n- [Additional documentation](https://github.com/square/kochiku/wiki/_pages)\n"
  },
  {
    "path": "Rakefile",
    "content": "# Add your own tasks in files placed in lib/tasks ending in .rake,\n# for example lib/tasks/capistrano.rake, and they will automatically be available to Rake.\n\nrequire File.expand_path('../config/application', __FILE__)\nrequire 'rake'\n\nKochiku::Application.load_tasks\n"
  },
  {
    "path": "app/assets/javascripts/application.js",
    "content": "//= require jquery\n//= require jquery_ujs\n//= require jquery.tipTip\n//= require jquery.tablesorter\n//= require jquery.timeago\n//= require jquery.flot\n//= require jquery.flot.errorbars\n//= require jquery.flot.categories\n//= require moment\n\n//= require_self\n\nmoment.lang('en', {\n  calendar: {\n    sameDay: 'h:mma',\n    lastDay: 'ddd ha',\n    lastWeek: 'ddd',\n    sameElse: 'M/D'\n  }\n});\n\nKochiku = {};\n\nStartTimes = {};\n\nKochiku.delayedRefresh = function(updateInfo) {\n  var now = new Date();\n  $(updateInfo.table).find('tr:has(.running)').each( function() {\n      var startTime = new Date(Date.parse(StartTimes[$(this).data('id')]));\n      $(this).find('.elapsed').text(\n        Math.floor((now-startTime)/60000) + \":\" + (\"00\" + (Math.round((now-startTime)/1000)%60)).slice(-2));\n  });\n  setTimeout(function() {\n    if($('input#refresh').is(':checked')) {\n      $.getJSON(document.URL + '/modified_time', function( data ) {\n        var buildTime = Date.parse(data);\n        var renderTime = updateInfo.renderTime;\n        if(buildTime > renderTime) {\n          Kochiku.buildInfo.renderTime = buildTime;\n          //keep the updated at display up to date\n          var timeAgo = new Date(renderTime).toISOString();\n          var updateDisplay = $(\"#time-since-update\");\n          updateDisplay.timeago(\"update\", timeAgo);\n          updateDisplay.prop(\"title\", timeAgo);\n          //ajax in changed parts\n          Kochiku.updateBuildParts(renderTime);\n        } else if ($(\".queue-position-value\").length > 0) {\n          // if there are displayed queue positions refresh\n          Kochiku.updateBuildParts(renderTime);\n        }\n      });\n      Kochiku.delayedRefresh(Kochiku.buildInfo);\n    }\n  }, 5000);\n};\n\njQuery(document).ready(function() {\n  jQuery(\"abbr.timeago\").timeago();\n});\n\nKochiku.updateBuildParts = function(renderTime) {\n  $.getJSON(document.URL + '/refresh_build_part_info', { modified_time: 
renderTime }, function( data ) {\n    $.each(data,function(index, el) {\n      if (el.state != Kochiku.buildInfo.state) {\n        if ( $.inArray(el.state, Kochiku.terminalStates) != -1) {\n        switch(el.state) {\n          case \"succeeded\":\n          case \"passed\":\n            status = \"✅ \" + el.state;\n            break;\n          case \"failed\":\n            status = \"🚫 \" + el.state;\n            break;\n          default:\n            status = el.state;\n        }\n        Kochiku.notify(status + \" \" + Kochiku.doneMessage + Kochiku.buildInfo.repo + \"/\" + Kochiku.buildInfo.branch);\n        }\n        window.location.reload();\n      }\n      var row;\n      row = $(Kochiku.buildInfo.table).find(\"[data-id='\" + el.id + \"']\");\n      if (row) {\n        row.replaceWith(el.content);\n      }\n    });\n    //reload the table after its updated\n    $(\"table.tablesorter\").trigger(\"update\", [true]);\n  });\n}\n\nKochiku.notify = function(message) {\n  options = {body: message}\n  if ( \"Notification\" in window && Notification.permission !== \"denied\") {\n    Notification.requestPermission(function (permission) {\n      if (permission === \"granted\") {\n        var notification = new Notification(\"Kochiku Build Status\", options);\n      }\n    });\n  }\n};\n\nKochiku.graphBuildTimes = function(repositoryPath, branchName) {\n  var url = '/' + repositoryPath + '/' + branchName + '/build-time-history',\n    colors = {\n      cucumber:     'hsl(87,  63%, 47%)',\n      spec:         'hsl(187, 63%, 47%)',\n      jasmine:      'hsl(27,  63%, 47%)',\n      maven:        'hsl(207, 63%, 47%)',\n      unit:         'hsl(187, 63%, 47%)',\n      integration:  'hsl(87,  63%, 47%)',\n      acceptance:   'hsl(207, 63%, 47%)'\n    };\n\n  $.getJSON(url, function(data) {\n    var plot = $('#plot'),\n      series = [];\n    for (var label in data) {\n      var points = data[label].slice(-20),\n        lastTime = null;\n      for (var i = 0; i < 
points.length; i++) {\n        var ref = $('<a>')\n              .attr('href', '/' + repositoryPath + '/builds/' + points[i][4])\n              .attr('class', 'build-status ' + points[i][5])\n              .text(points[i][0]).wrap('<div>'),\n                time = moment(points[i][6]).calendar().replace(/m$/,'');\n        if (time != lastTime) {\n          ref.after($('<time>').text(time));\n          lastTime = time;\n        }\n        points[i][0] = ref.parent().html();\n      }\n      series.push({\n        label: label,\n        data: points,\n        color: colors[label]\n      });\n    }\n\n    $.plot(plot, series, {\n      xaxis: {\n        mode: 'categories',\n        color: 'transparent'\n      },\n      yaxis: {\n        color: '#f3f3f3'\n      },\n      points: {\n        show: true,\n        lineWidth: 1.5,\n        radius: 3,\n        shadowSize: 0,\n        errorbars: 'y',\n        yerr: {\n          show: true,\n          asymmetric: true,\n          lineWidth: 1,\n          lowerCap: '-'\n        }\n      },\n      grid: {\n        borderWidth: 0,\n        clickable: true,\n        labelMargin: 20,\n        margin: {\n          left: 20\n        }\n      },\n      legend: {\n        show: true,\n        position: 'nw',\n        noColumns: series.length\n      }\n    });\n\n    $('<div class=\"axis-label y\">')\n      .text('Minutes (Min to Max)')\n      .appendTo(plot);\n  });\n};\n\n(function() {\n  var statuses = [\n    'Errored', 'Aborted', 'Failed', 'Running', 'Runnable', 'Passed'\n  ];\n\n  $.tablesorter.addParser({\n    id:     'state',\n    type:   'numeric',\n    is:     function(s) { return statuses.indexOf(s) !== -1; },\n    format: function(s) { return statuses.indexOf(s.replace(/^\\s+|\\s+$/g, '')); }\n  });\n})();\n\nfunction timeToSeconds(time) {\n  var timeArr = time.split(':')\n  if (timeArr.length === 2) {\n    return Number(timeArr[0])*60 + Number(timeArr[1])\n  } else {\n    return Number(timeArr[0])*3600 + Number(timeArr[1])*60 
+ Number(timeArr[2])\n  }\n}\n\n(function() {\n  $.tablesorter.addParser({\n    id:     'elapsedTime',\n    type:   'numeric',\n    is:     function(s) { return /^([0-9]*:(?=[0-9]{2}:))?([0-5]?[0-9])(:[0-5][0-9])$/.test(s); },\n    format: function(s) { return timeToSeconds(s); }\n  });\n})();\n"
  },
  {
    "path": "app/assets/stylesheets/screen.sass",
    "content": "@import compass\n@import compass/reset\n@import compass/css3/border-radius\n\n/* SQ Market font\n@font-face\n  font-family: \"SQ Market\"\n  src: url(\"/fonts/SQMarket-Regular.otf\") format(\"opentype\")\n\n/* Variables\n$font-base: Helvetica Neue, Helvetica, Arial, sans-serif\n$font-code: Inconsolata, Menlo, Monaco, monospace\n$color-base: #52585D\n$color-title: #52585D\n$color-link: #2D81C5\n\n$color-passed: hsl(90, 90%, 70%)\n$color-runnable: hsl(216, 40%, 90%)\n$color-running: hsl(216, 90%, 70%)\n$color-failed: hsl(0, 90%, 70%)\n$color-errored: hsl(48, 90%, 70%)\n$color-aborted: hsl(0, 0%, 70%)\n\n/* Mixins\n=has-layout\n  display: block\n  zoom: 1\n\n=envelope\n  +has-layout\n\n  position: relative\n  overflow: hidden\n\n=safe-envelope\n  +has-layout\n\n  &:after\n    content: \" \"\n    display: block\n    height: 0\n    clear: both\n    overflow: hidden\n    visibility: hidden\n\n=text-shadow\n  text-shadow: 0 1px 0 rgba(0, 0, 0, 0.5)\n\n=box-sizing($box-sizing: border-box)\n  box-sizing: $box-sizing\n  -webkit-box-sizing: $box-sizing\n  -moz-box-sizing: $box-sizing\n\n=border-radius($radius: 0)\n  -webkit-border-radius: $radius\n  -moz-border-radius: $radius\n  border-radius: $radius\n\n/* Styles\nhtml, body\n  height: 100%\n\nbody, input\n  font-family: $font-base\n  font-size: 12px\n\n#page\n  margin: 1em 40px\n  min-height: 100%\n\nh1, h2, h3, h4, h5, h6\n  font-weight: 300\n\na\n  color: $color-link\n  text-decoration: none\n\nstrong\n  font-weight: bold\n\ncode\n  font-family: $font-code\n\ninput[type=text],\ninput[type=email],\ninput[type=password]\n  font-family: $font-base\n  outline: none !important\n  width: 400px\n  border-radius: 3px\n  -webkit-box-shadow: 0 2px 2px rgba(black, 0.1) inset\n  -moz-box-shadow: 0 2px 2px rgba(black, 0.1) inset\n  border: solid 1px rgba(black, 0.3)\n  padding: 4px\n  color: #202122\n\n  &:focus\n    border-color: hsla(207, 63%, 47%, 0.6)\n\ninput[type=submit], button, .button\n  
+border-radius(3px)\n\n  box-shadow: 0 1px 0 rgba(white, 0.1) inset, 0 0 3px rgba(black, 0.2)\n  border: solid 1px rgba(black, 0.2)\n  background: $color-link linear-gradient(to bottom, transparent, rgba(black, 0.2))\n  color: white\n  text-shadow: 0 -1px 0 rgba(black, 0.2)\n  padding: 4px 8px\n\n  a\n    color: white\n\n  &:active\n    box-shadow: 0 1px 3px rgba(black, 0.2) inset\n    color: rgba(white, 0.95)\n\n// Tooltip overrides\n#tiptip_content\n  font-size: 14px\n\n/* Index\n.section-wrapper\n  overflow: hidden\n  border-bottom: solid 1px #e2e2e2\n\n.section\n  +envelope\n\n  margin: 1em\n\n.flash\n  text-align: center\n  font-size: 14px\n  padding: 10px\n  border-radius: 12px\n  margin-bottom: 10px\n  font-weight: bold\n\n  &.message\n    background-color: #91f991\n\n  &.error\n    background-color: #ff9999\n\n  &.warn\n    background-color: #edfa3f\n\n#disabled-repo-alert\n  color: #8a6d3b\n  background-color: #fcf8e3\n  border-color: #faebcc\n  font-size: 14px\n\n#header\n  a.logo\n    float: left\n\n    h1\n      font-family: \"SQ Market\", $font-base\n      font-size: 28px\n      height: 37px\n      color: $color-title + #666\n\n      b\n        color: $color-title\n\n      .translation\n        font-size: 14px\n\n  ul.links\n    display: inline-block\n\n    li\n      display: inline-block\n      margin-right: 1em\n\n  form\n    display: inline-block\n\n#nav\n  padding-bottom: 30px\n\nh2.subheader\n  color: $color-title + #222\n  font-size: 175%\n  padding: 0 0 25px\n\n  .info, .actions\n    font-size: 12px\n    font-weight: normal\n\n  .actions\n    float: right\n    padding-top: 5px\n\n  form\n    display: inline\n\n  label\n    margin-left: 1em\n    min-width: 0\n\nh3\n  color: $color-title + #222\n  font-size: 125%\n  padding: 0 0 10px\n\n.info + .info\n  margin-left: 8px\n  padding-left: 8px\n  border-left: solid 1px rgba(0, 0, 0, 0.3)\n\n  form\n    display: inline-block\n\n.log_contents\n  font-family: monospace\n  min-height: 300px\n  
white-space: pre-wrap\n\n.performance\n  font-size: 1.1em\n  float: right\n  width: 300px\n  text-align: right\n\n  .great\n    color: $color-passed\n\n  .decent\n    color: $color-errored\n\n  .bad\n    color: $color-failed\n\n  .label\n    cursor: help\n\n.build-stats\n  margin-bottom: 2em\n  width: 700px\n\n#plot\n  margin-bottom: 2em\n  min-width: 1000px\n  height: 180px\n\n  .legend table\n    width: auto\n\n    td\n      padding: 2px\n\n  .xAxis\n    text-align: center\n    font-size: 12px\n\n    a\n      font-family: $font-code\n\n    .build-status\n      position: relative\n\n    .build-status:before\n      position: absolute\n      left: 50%\n      top: -15px\n      width: 8px\n      height: 8px\n      font-size: 8px\n      margin: 0\n      margin-left: -5px\n      border-radius: 2px\n\n    time\n      display: block\n      font-size: 10px\n      margin-top: 2px\n\n  .axis-label\n    position: absolute\n    color: #666\n\n    &.y\n      -webkit-transform: rotate(-90deg)\n      left: -45px\n      top: 45%\n\n  .flot-overlay, .flot-y-axis\n    // make the x-axis clickable\n    z-index: -1\n\n#tooltip\n  position: absolute\n  display: none\n  border: 1px solid #52585D\n  padding: 2px\n  background-color: #B8BEC3\n  font-size: 130%\n  opacity: 0.8\n\n.build-state\n  .state\n    font-weight: bold\n\n  .queue\n    font-weight: bold\n\n.build\n  overflow: hidden\n  margin: 0 0 50px\n\n  .build-wrapper\n    +envelope\n\n    color: #666\n    display: block\n\n    .build-info\n      float: left\n\n      .ref\n        font-size: 175%\n        font-family: $font-code\n        line-height: 40px\n        letter-spacing: 1px\n\n        &:hover\n          color: $color-link\n\n    .times\n      float: right\n      margin: 10px 20px\n      line-height: 15px\n      text-align: right\n\n    .build-id\n      +text-shadow\n\n      font-family: $font-base\n      float: right\n      font-size: 400%\n      font-weight: bold\n      text-align: right\n\n$part-margin: 
2px\n\n.parts\n  +envelope\n\n  margin: 10px (-$part-margin)\n\n  .part\n    overflow: hidden\n    float: left\n    border: 1px solid rgba(0, 0, 0, 0.1)\n    border-radius: 3px\n    margin: $part-margin\n    box-shadow: 0 1px 1px rgba(0, 0, 0, 0.3)\n\n  .part.runnable\n    background-color: $color-runnable\n\n  .part.running\n    background-color: $color-running\n\n  .part.passed\n    background-color: $color-passed\n\n  .part.failed\n    background-color: $color-failed\n\n  .part.errored\n    background-color: $color-errored\n\n  .part.aborted\n    background-color: $color-aborted\n\n  .part-wrapper\n    +envelope\n\n    display: block\n    margin: 5px\n    width: 115px\n    height: 25px\n    color: #333\n\n    .kind, .attempts\n      display: inline-block\n\n    .kind\n      font-size: 175%\n      font-weight: bold\n      line-height: 27px\n\n.attempts\n  .attempt\n    overflow: hidden\n\n.paths-tooltip\n  border: 1px solid rgba(0, 0, 0, 0.5)\n  background-color: #fff\n  color: #333\n  font-size: 120%\n  font-family: $font-code\n  max-width: 500px !important\n\n  li\n    margin: 5px 0\n\n.build-status, .part-status, .attempt-status\n  &:before\n    content: \" \"\n    border: 1px solid rgba(black, 0.4)\n    border-radius: 3px\n    display: inline-block\n    width: 10px\n    height: 10px\n    text-align: center\n    margin-right: 2px\n    box-shadow: 0 1px 1px rgba(black, 0.1)\n    font-size: 10px\n    font-family: verdana, sans-serif\n    font-weight: bold\n    color: rgba(black, 0.4)\n\n  &.runnable:before, &.partitioning:before\n    background-color: $color-runnable\n\n  &.running:before\n    background-color: $color-running\n\n  &.failed:before, &.errored:before, &.aborted:before, &.doomed:before\n    content: \"!\"\n\n  &.failed:before, &.doomed:before\n    background-color: $color-failed\n\n  &.errored:before\n    background-color: $color-errored\n\n  &.aborted:before\n    background-color: $color-aborted\n\n  &.passed:before, &.succeeded:before\n    
background-color: $color-passed\n\nh2 .build-status:before\n  vertical-align: middle\n\n/* New build in header\n.header-right\n  float: right\n  line-height: 32px\n  margin-top: 5px\n\n  input.ref\n    margin: 0\n    width: 200px\n    font-size: 100%\n\n/* New build page\ninput.ref\n  width: 500px\n  font-family: $font-code\n  font-size: 150%\n\ninput.build-button\n  font-size: 120%\n\ninput[disabled]\n  background-color: #e8e8e8\n  background-image: linear-gradient(#fcfcfc,#e8e8e8)\n  border: 1px solid #e8e8e8\n  border-top-color: #e8e8e8\n  color: #999\n\n/* health pretends to be a button\n.health\n  float: right\n  margin: 8px 5px\n\ntable\n  table-layout: fixed\n  width: 100%\n\n  tr:nth-child(even)\n    background-color: rgba(0, 0, 0, 0.05)\n\n  tbody tr:hover\n    background-color: #e4edfa !important\n\n  th\n    font-weight: bold\n    border-bottom: 1px solid rgba(0, 0, 0, 0.1)\n    padding: 0 5px 5px\n    cursor: pointer\n    white-space: nowrap\n\n    &.headerSortDown:after\n      content: \" ▲\"\n      font-size: 8px\n\n    &.headerSortUp:after\n      content: \" ▼\"\n      font-size: 8px\n\n    code\n      font-size: 15px\n\n    &.id\n      width: 50px\n\n    &.whisker\n      width: 45px\n\n    &.queue-position\n      width: 50px\n\n    &.status\n      width: 70px\n\n    &.ruby-version\n      width: 100px\n\n    &.type\n      width: 156px\n\n    &.worker\n      width: 156px\n\n    &.time\n      width: 80px\n\n    &.count\n      width: 50px\n\n    &.actions\n      width: 70px\n\n    &.first-path\n      width: 100px\n\n    &.date\n      width: 180px\n\n  td\n    padding: 5px\n    white-space: nowrap\n    overflow-x: scroll\n    /* Fix chrome 67 formating issue */\n    position: relative\n\n    &::-webkit-scrollbar\n      display: none\n\n    &.whisker\n      padding-right: 2px\n\n      .part-status:before\n        content: \"\"\n        width: 1px\n        height: 10px\n        margin-right: -1px\n        border-color: rgba(black, 0.2)\n\n      
.part-status.attempt-0:before\n        height: 1px\n        border-color: transparent\n        -webkit-box-shadow: none\n\n      .part-status.attempt-1:before\n        height: 1px\n\n      .part-status.attempt-2:before\n        height: 3px\n\n      .part-status.attempt-3:before\n        height: 5px\n\n      .part-status.attempt-4:before\n        height: 7px\n\n      .part-status.attempt-5:before\n        height: 9px\n\n  th.right, td.right\n    text-align: right\n\n  td .paths\n    color: #999\n\n    .root\n      color: black\n\n  td.wrap\n    white-space: normal\n    line-height: 1.6\n\n  tr:target td\n    background-color: #ffc\n\n/* Search by build revision\n.select_commit\n  margin-bottom: 20px\n\n#build_ref_input\n  width: 100px\n\n/* Build show\n.build-info-subheader\n  margin: (-15px) 0 25px\n\n.build-summary\n  margin-bottom: 10px\n\n/* Build parts show\ntable.build-part-info\n  margin-bottom: 20px\n\nol#build-paths\n  margin-bottom: 20px\n  list-style: square inside none\n  line-height: 150%\n\n.new-repository-link\n  padding-bottom: 20px\n\n.projects-grid\n  text-align: center\n\n.projects\n  +pie-clearfix\n\n  margin-right: 0.75em\n  margin-left: 0.75em\n\n  li.build-info\n    margin: 0.3em 0\n\n    &.bold\n      font-weight: bold\n\n  .ci-build-info\n    position: relative\n    display: inline-block\n    width: 188px\n    height: 188px\n    border-radius: 10px\n    text-align: center\n    vertical-align: top\n    margin: 0.5em\n    color: rgba(black, 0.8)\n    text-shadow: rgba(white, 0.1) 0 1px 0\n\n    +box-shadow(0 1px 2px rgba(black, 0.5), inset 0 0 3px rgba(black, 0.3))\n\n    a\n      color: rgba(white, 0.95)\n      text-shadow: rgba(black, 0.2) 0 -1px 0\n\n    .project-name\n      font-size: 26px\n      font-weight: 300\n\n      a\n        display: block\n        padding: 55px 0 10px\n        letter-spacing: 0.05em\n\n    .state\n      padding-top: 6px\n\n    .project-link\n      display: none\n      position: absolute\n      bottom: 10px\n      
width: 100%\n\n    &:hover\n      .project-link\n        display: block\n\n.ci-errored, .ci-doomed, .ci-failed\n  background-color: desaturate(darken($color-failed, 10%), 20%)\n  font-weight: bold\n\n.ci-partitioning, .ci-runnable, .ci-running\n  background-color: desaturate(darken($color-running, 10%), 20%)\n\n.ci-succeeded\n  background-color: desaturate(darken($color-passed, 20%), 20%)\n\n.ci-unknown, .ci-aborted\n  background-color: desaturate(darken($color-aborted, 10%), 20%)\n  font-weight: bold\n\n/* Repository form styles\n\nfieldset\n  legend\n    font-weight: bold\n\n  padding: 0.5em 0\n  margin: 1em 0\n\n#repository-form label\n  display: inline-block\n  width: 220px\n\n// Give more spacing to the checkboxes\n#repository-form > div, #repository-form fieldset > div\n  min-height: 28px\n\n#repository-form input[type='checkbox']\n  margin-top: 8px\n\n#repository-form .short\n  width: 150px\n\n.delete-form\n  float: right\n  margin-top: -28px\n\n.danger-button\n  background-color: $color-failed !important\n\n#branch-delete-warning\n  color: #B8BEC3\n\n/* END Repository form styles\n\n.build-error\n  pre\n    font-family: monospace\n    background-color: rgba(0, 0, 0, 0.05)\n    padding: 1em\n    margin-bottom: 2em\n\n  h2\n    padding: 0.5em\n    font-size: 1.5em\n    font-weight: bolder\n    color: darken($color-failed, 20%)\n\n.build-empty\n  div\n    display: block\n    padding: 1.2em\n    margin-bottom: 2em\n    background-color: rgba(0, 0, 0, 0.05)\n\n  h2\n    padding: 0.5em\n    font-size: 1.5em\n    font-weight: bolder\n\n.hint\n  font-style: italic\n\np\n  line-height: 1.4em\n\n#worker-health-wrap\n  overflow: auto\n\n.worker-health\n  table-layout: auto\n\n// Firefox doesn't support a fixed table with\n// a scroll overflow, so just hide the overflow on Firefox.\n// It also doesn't react to just overflow-x, we need to hide\n// overflow in both axes\n@-moz-document url-prefix()\n  table\n    td\n      overflow: hidden\n"
  },
  {
    "path": "app/controllers/application_controller.rb",
    "content": "class ApplicationController < ActionController::Base\n  include BuildHelper\n\n  rescue_from ActiveRecord::RecordNotFound do |exception|\n    render file: \"#{Rails.public_path}/404.html\", layout: false, status: 404\n  end\nend\n"
  },
  {
    "path": "app/controllers/branches_controller.rb",
    "content": "class BranchesController < ApplicationController\n  caches_action :show, :build_time_history, cache_path: proc {\n    load_repository_and_branch\n    { :modified => [@branch.updated_at.to_i, @repository.updated_at.to_i].max }\n  }\n\n  caches_action :status_report, expires_in: 15.seconds\n\n  # lists all convergence branches as well the 100 most recently active\n  # branches\n  def index\n    load_repository\n    @convergence_branches = @repository.branches.where(convergence: true)\n    @recently_active_branches = @repository.branches.where(convergence: false).order('updated_at DESC').limit(100)\n  end\n\n  def show\n    load_repository_and_branch\n    @build = @branch.builds.build\n    @builds = @branch.builds.includes(build_parts: :build_attempts).last(12)\n    @current_build = @builds.last\n\n    @build_parts = {}\n    @builds.reverse_each do |build|\n      build.build_parts.each do |build_part|\n        key = [build_part.paths.first, build_part.kind, build_part.options['ruby']]\n        (@build_parts[key] ||= {})[build] = build_part\n      end\n    end\n\n    @branch = @branch.decorate\n\n    respond_to do |format|\n      format.html\n      # Note: appending .rss to the end of the url will not return RSS format\n      # due to the very permissive branch id constraint. 
Instead users will\n      # have to specify a query param of format=rss to receive the RSS feed.\n      format.rss { @builds = @builds.reverse } # most recent first\n      format.json\n    end\n  end\n\n  def request_new_build\n    load_repository_and_branch\n\n    ref = @repository.sha_for_branch(@branch.name)\n\n    existing_build = @repository.build_for_commit(ref)\n\n    if existing_build.present?\n      flash[:warn] = \"Did not find a new commit on the #{@branch.name} branch to build\"\n      redirect_to repository_branch_path(@repository, @branch)\n    else\n      build = @branch.builds.build(ref: ref, state: 'partitioning')\n\n      if build.save\n        flash[:message] = \"New build started for #{build.ref} on #{@branch.name}\"\n        redirect_to repository_build_path(@repository, build)\n      else\n        flash[:error] = \"Error adding build! #{build.errors.full_messages.to_sentence}\"\n        redirect_to repository_branch_path(@repository, @branch)\n      end\n    end\n  end\n\n  def health\n    load_repository_and_branch\n    initialize_stats_variables\n    load_build_stats\n\n    @builds = @branch.builds.includes(:build_parts => :build_attempts).last(params[:count] || 12)\n\n    build_part_attempts = Hash.new(0)\n    build_part_failures = Hash.new(0)\n    failed_parts = {}\n    @builds.each do |build|\n      build.build_parts.each do |build_part|\n        key = [build_part.paths.sort, build_part.kind]\n        build_part.build_attempts.each do |build_attempt|\n          if build_attempt.successful?\n            build_part_attempts[key] = build_part_attempts[key] + 1\n          elsif build_attempt.unsuccessful?\n            build_part_attempts[key] = build_part_attempts[key] + 1\n            build_part_failures[key] = build_part_failures[key] + 1\n            failed_parts[key] = (failed_parts[key] || []) << build_part\n          end\n        end\n      end\n    end\n\n    @part_climate = {}\n    failed_parts.each do |key, parts|\n      part_error_rate = (build_part_failures[key] * 100 / build_part_attempts[key])\n      @part_climate[[part_error_rate, key]] = parts.uniq\n    end\n\n    @branch = @branch.decorate\n  end\n\n  def build_time_history\n    load_repository_and_branch\n\n    respond_to do |format|\n      format.json do\n        render json: @branch.decorate.build_time_history.to_json\n      end\n    end\n  end\n\n  # GET /XmlStatusReport.aspx\n  #\n  # This action returns the current build status for all of the convergence branches\n  # in the system\n  def status_report\n    @branches = Branch.includes(:repository).where(convergence: true).decorate\n  end\n\n  def badge\n    @repository || load_repository\n    @branch ||= @repository.branches.where(name: params[:branch]).first!\n    build = @branch.most_recent_build\n    if build.succeeded?\n      send_file('public/images/passing.svg', type: 'image/svg+xml', disposition: 'inline')\n    elsif build.failed?\n      send_file('public/images/failing.svg', type: 'image/svg+xml', disposition: 'inline')\n    else # in progress\n      send_file('public/images/pending.svg', type: 'image/svg+xml', disposition: 'inline')\n    end\n  end\n\n  private\n\n  def load_repository\n    r_namespace, r_name = params[:repository_path].split('/')\n    @repository = Repository.where(namespace: r_namespace, name: r_name).first!\n  end\n\n  def load_repository_and_branch\n    @repository || load_repository\n    @branch ||= @repository.branches.where(name: params[:id]).first!\n  end\n\n  # set the various stats variables to reasonable null values in case\n  # the load_build_stats method short-circuits\n  def initialize_stats_variables\n    @days_since_first_build = 0\n    @total_build_count = 0\n    @total_failure_count = 0\n    @total_pass_rate = '—'\n    @last30_build_count = 0\n    @last30_failure_count = 0\n    @last30_pass_rate = '—'\n    @last7_build_count = 0\n    @last7_failure_count = 0\n    @last7_pass_rate = '—'\n  end\n\n  def load_build_stats\n    @first_built_date = @branch.builds.first.try(:created_at)\n    return if @first_built_date.nil?\n\n    @days_since_first_build = (Time.zone.today - @first_built_date.to_date).to_i\n\n    @total_build_count = @branch.builds.count\n    @total_failure_count = @branch.builds.where.not(state: 'succeeded').count\n    @total_pass_rate = (@total_build_count - @total_failure_count) * 100 / @total_build_count\n\n    @last30_build_count = @branch.builds.where('created_at >= ?', Time.zone.today - 30.days).count\n    return if @last30_build_count.zero?\n    @last30_failure_count = @last30_build_count - @branch.builds.where('state = \"succeeded\" AND created_at >= ?', Time.zone.today - 30.days).count\n    @last30_pass_rate = (@last30_build_count - @last30_failure_count) * 100 / @last30_build_count\n\n    @last7_build_count = @branch.builds.where('created_at >= ?', Time.zone.today - 7.days).count\n    return if @last7_build_count.zero?\n    @last7_failure_count = @last7_build_count - @branch.builds.where('state = \"succeeded\" AND created_at >= ?', Time.zone.today - 7.days).count\n    @last7_pass_rate = (@last7_build_count - @last7_failure_count) * 100 / @last7_build_count\n  end\nend\n"
  },
  {
    "path": "app/controllers/build_artifacts_controller.rb",
    "content": "class BuildArtifactsController < ApplicationController\n\n  def create\n    @build_artifact = BuildArtifact.new\n    @build_artifact.build_attempt_id = params[:build_attempt_id]\n    @build_artifact.log_file = params[:build_artifact].try(:[], :log_file)\n\n    respond_to do |format|\n      if @build_artifact.save\n        format.xml  { head :created, :location => @build_artifact.log_file.url }\n      else\n        format.xml  { render :xml => @build_artifact.errors, :status => :unprocessable_entity }\n      end\n    end\n  end\n\n  # A redirect is preferable to direct linking if logs are stored remotely with expiring urls.\n  def show\n    build_artifact = BuildArtifact.find(params[:id])\n\n    redirect_to build_artifact.log_file.url\n  end\nend\n"
  },
  {
    "path": "app/controllers/build_attempts_controller.rb",
    "content": "require 'json'\nrequire 'net/http'\n\nclass BuildAttemptsController < ApplicationController\n\n  def start\n    @build_attempt = BuildAttempt.find(params[:id])\n\n    respond_to do |format|\n      if @build_attempt.aborted?\n        format.json { render :json => @build_attempt }\n      elsif @build_attempt.start!(params[:builder])\n        @build_attempt.log_streamer_port = params[:logstreamer_port]\n        @build_attempt.instance_type = params[:instance_type] if params[:instance_type].present?\n        @build_attempt.save\n        format.json { render :json => @build_attempt }\n      else\n        format.json { render :json => @build_attempt.errors, :status => :unprocessable_entity }\n      end\n    end\n  end\n\n  def finish\n    @build_attempt = BuildAttempt.find(params[:id])\n\n    respond_to do |format|\n      if @build_attempt.finish!(params[:state])\n        format.json { head :ok }\n        format.html do\n          redirect_to repository_build_part_url(@build_attempt.build_part.build_instance.repository,\n                                                @build_attempt.build_part.build_instance,\n                                                @build_attempt.build_part)\n        end\n      else\n        format.json { render :json => @build_attempt.errors, :status => :unprocessable_entity }\n      end\n    end\n  end\n\n  # Redirects to the build_part page since we don't have a page for a single build attempt.\n  # Added as a shortcut method to use when the IDs of the relation chain is not handy.\n  def show\n    @build_attempt = BuildAttempt.find(params[:id])\n\n    redirect_to repository_build_part_url(\n      @build_attempt.build_part.build_instance.repository,\n      @build_attempt.build_part.build_instance,\n      @build_attempt.build_part,\n      anchor: helpers.dom_id(@build_attempt))\n  end\n\n  def stream_logs\n    @build_attempt = BuildAttempt.find(params[:id])\n    unless @build_attempt.log_streamer_port || @build_attempt.builder\n      render plain: \"No log streaming available for this build attempt\", status: 404\n    end\n\n    # if full log has already been uploaded, redirect there\n    if (stdout_log = @build_attempt.build_artifacts.stdout_log.try(:first))\n      redirect_to stdout_log\n      return\n    end\n\n    @build = @build_attempt.build_instance\n    @repository = @build.repository\n    @build_part = @build_attempt.build_part\n  end\n\n  # basically proxies request to the appropriate worker\n  def stream_logs_chunk\n    @build_attempt = BuildAttempt.find(params[:id])\n    start = params.fetch(:start, 0)\n    max_bytes = params.fetch(:maxBytes, 250000)\n\n    port = @build_attempt.log_streamer_port\n    builder = @build_attempt.builder\n    if !port || !builder\n      render json: {\"error\" => \"No log streaming available for this build attempt\"}, status: 500\n      return\n    end\n\n    # logstreamer_base_url = \"http://#{builder}:#{port}\"\n\n    http = Net::HTTP.new(builder, port)\n    http.read_timeout = 5\n\n    response = begin\n                 http.get(\"/build_attempts/#{@build_attempt.id}/log/stdout.log?start=#{start}&maxBytes=#{max_bytes}\")\n               rescue\n                 false\n               end\n\n    if !response || response.code !~ /^2/\n      render json: {\"error\" => \"unable to reach log streamer\"}, status: 500\n      return\n    end\n\n    output_json = JSON.parse(response.body)\n    output_json['state'] = @build_attempt.state\n    render json: output_json\n  end\nend\n"
  },
  {
    "path": "app/controllers/build_parts_controller.rb",
    "content": "class BuildPartsController < ApplicationController\n  before_action :load_repository_build_and_part, only: [:rebuild, :show, :modified_time, :refresh_build_part_info]\n  before_action only: [:show, :refresh_build_part_info] do\n    calculate_build_attempts_position(@build_part.build_attempts, @build_part.queue)\n  end\n\n  include BuildAttemptsQueuePosition\n\n  caches_action :show, cache_path: proc {\n    {\n      modified: [@build_part.updated_at.to_i, @repository.updated_at.to_i].max,\n      queue_position: Digest::SHA1.hexdigest(@build_attempts_rank.values.join(','))\n    }\n  }\n\n  def show\n    respond_to do |format|\n      format.html\n      format.json do\n        render :json => @build_part, include: { build_attempts: { methods: :files } }\n      end\n    end\n  end\n\n  def rebuild\n    begin\n      @build_part.rebuild!\n    rescue GitRepo::RefNotFoundError\n      flash[:error] = \"It appears the commit #{@build.ref} no longer exists.\"\n    end\n\n    redirect_to [@repository, @build]\n  end\n\n  def modified_time\n    respond_to do |format|\n      format.json do\n        render :json => @build_part.updated_at\n      end\n    end\n  end\n\n  def refresh_build_part_info\n    updates = []\n    if @build_part.finished_at\n      updates << { state: @build_part.status }\n    else\n      @build_part.build_attempts.each_with_index do |attempt, index|\n        html = ApplicationController.render(partial: 'build_parts/build_attempts', locals: {  index: index,\n                                                                                              attempt: attempt,\n                                                                                              build_attempts_rank: @build_attempts_rank})\n        updates << {id: index, content: html, state: @build_part.status}\n      end\n    end\n    respond_to do |format|\n      format.json do\n        render :json => updates\n      end\n    end\n  end\n\n  private\n\n  def load_repository_build_and_part\n    r_namespace, r_name = params[:repository_path].split('/')\n    @repository = Repository.where(namespace: r_namespace, name: r_name).first!\n    @build = Build.joins(:branch_record).where('branches.repository_id' => @repository.id).find(params[:build_id])\n    @build_part = @build.build_parts.find(params[:id])\n  end\nend\n"
  },
  {
    "path": "app/controllers/builds_controller.rb",
    "content": "require 'git_repo'\n\nclass BuildsController < ApplicationController\n  before_action :load_repository, :only => [:show, :retry_partitioning, :rebuild_failed_parts, :request_build, :abort, :toggle_merge_on_success, :build_status, :modified_time, :refresh_build_part_info, :resend_status]\n  before_action only: [:show, :refresh_build_part_info] do\n    @build = Build.includes(build_parts: :build_attempts)\n                  .joins(:branch_record).where('branches.repository_id' => @repository.id)\n                  .find(params[:id])\n    calculate_build_parts_position(@build)\n    format_build_parts_position\n  end\n\n  include BuildAttemptsQueuePosition\n\n  caches_action :show, cache_path: proc {\n    updated_at = Build.select(:updated_at).find(params[:id]).updated_at\n    {\n      build_modified: [updated_at.to_i, @repository.updated_at.to_i].max,\n      queue_position: Digest::SHA1.hexdigest(@build_attempts_rank.values.join(','))\n    }\n  }\n\n  def show\n    respond_to do |format|\n      format.html\n      format.json { render :json => @build, include: { build_parts: { methods: [:status] } } }\n      format.png do\n        # See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.21\n        headers['Expires'] = CGI.rfc1123_date(Time.now.utc)\n\n        send_data(@build.to_png, :type => 'image/png', :disposition => 'inline')\n      end\n    end\n  end\n\n  # Public: Kickoff a build from the kochiku CLI script\n  #\n  # repo_url         - The remote url for the git repository\n  # git_sha          - (optional) the SHA of the specific git commit the user is requesting to build\n  # git_branch       - String name of the git branch to perform the build of. If\n  #                    'git_sha' is not specified then it will use HEAD of the git branch.\n  # merge_on_success - Bool. Request kochiku automatically merge the branch if the build succeeds.\n  #\n  def create\n    merge_on_success = (params[:merge_on_success] || false)\n    repository = Repository.lookup_by_url(params[:repo_url])\n    unless repository\n      raise ActiveRecord::RecordNotFound, \"Repository for #{params[:repo_url]} not found\"\n    end\n\n    if params[:git_sha].present?\n      build = repository.build_for_commit(params[:git_sha])\n      if build\n        head :ok, :location => repository_build_url(repository, build)\n        return\n      end\n    end\n\n    branch = repository.branches.where(name: params[:git_branch]).first_or_create!\n\n    ref_to_build = if params[:git_sha].present?\n                     params[:git_sha]\n                   else\n                     repository.sha_for_branch(branch.name)\n                   end\n\n    build = branch.builds.build(ref: ref_to_build, state: 'partitioning', merge_on_success: merge_on_success)\n\n    if build.save\n      head :ok, :location => repository_build_url(repository, build)\n    else\n      render :plain => build.errors.full_messages.join('\\n'), :status => :unprocessable_entity\n    end\n  end\n\n  def retry_partitioning\n    @build = Build.joins(:branch_record).where('branches.repository_id' => @repository.id).find(params[:id])\n\n    # This means there was an error with the partitioning job; redo it\n    if @build.build_parts.empty?\n      @build.update_attributes! :state => 'partitioning', :error_details => nil\n      @build.enqueue_partitioning_job\n    end\n\n    redirect_to [@repository, @build]\n  end\n\n  def rebuild_failed_parts\n    @build = Build.includes(build_parts: :build_attempts)\n                  .joins(:branch_record).where('branches.repository_id' => @repository.id)\n                  .find(params[:id])\n    @build.build_parts.failed_errored_or_aborted.each do |part|\n      # There is an exceptional case in Kochiku where a build part's prior attempt may have\n      # passed but the latest attempt failed. We do not want to rebuild those parts.\n      part.rebuild! if part.unsuccessful?\n    end\n    @build.update_attributes! state: 'running'\n\n    redirect_to [@repository, @build]\n  end\n\n  def abort\n    @build = Build.joins(:branch_record).where('branches.repository_id' => @repository.id).find(params[:id])\n    @build.abort!\n    redirect_to repository_build_path(@repository, @build)\n  end\n\n  def toggle_merge_on_success\n    @build = Build.joins(:branch_record).where('branches.repository_id' => @repository.id).find(params[:id])\n    @build.update_attributes!(:merge_on_success => params[:merge_on_success])\n    redirect_to repository_build_path(@repository, @build)\n  end\n\n  def build_status\n    @build = @repository ? @repository.builds.find(params[:id]) : Build.find(params[:id])\n\n    respond_to do |format|\n      format.json do\n        render :json => @build\n      end\n    end\n  end\n\n  def modified_time\n    updated_at = Build.joins(:branch_record).where('branches.repository_id' => @repository.id)\n                      .find(params[:id]).updated_at\n    respond_to do |format|\n      format.json do\n        render :json => updated_at\n      end\n    end\n  end\n\n  def resend_status\n    @build = Build.joins(:branch_record).where('branches.repository_id' => @repository.id).find(params[:id])\n    BuildStateUpdateJob.enqueue(@build.id)\n    redirect_to repository_build_path(@repository, @build)\n  end\n\n  def refresh_build_part_info\n    updates = []\n    last_modified = Time.zone.at(params[:modified_time].to_i / 1000.0)\n    if @build.completed?\n      updates << { state: @build.state }\n    else\n      updatd_parts = @build.build_parts.where(\"updated_at > ? OR id in (?)\", last_modified, @build_parts_position.keys)\n      updatd_parts.each do |part|\n        html = ApplicationController.render(partial: 'builds/build_parts', locals: { part: part.decorate,\n                                                                                     build: @build,\n                                                                                     build_parts_position: @build_parts_position,\n                                                                                     repository: @repository })\n        updates << {id: part.id, content: html, state: @build.state}\n      end\n    end\n    respond_to do |format|\n      format.json do\n        render :json => updates\n      end\n    end\n  end\n\n  def build_redirect\n    build_instance = Build.find(params[:id])\n    redirect_to repository_build_path(build_instance.repository, build_instance)\n  end\n\n  def build_ref_redirect\n    # search prefix so that entire git ref does not have to be provided.\n    build_instance = Build.where(\"ref LIKE ?\", \"#{params[:ref]}%\").first\n    redirect_to repository_build_path(build_instance.repository, build_instance)\n  end\n\n  private\n\n  def load_repository\n    if params[:repository_path]\n      r_namespace, r_name = params[:repository_path].split('/')\n      @repository = Repository.where(namespace: r_namespace, name: r_name).first!\n    end\n  end\n\n  def format_build_parts_position\n    @build_parts_position = {}\n    @build_attempts_rank&.each do |build_attempt_id, position|\n      next if position.nil?\n      build_part_id = BuildAttempt.find(build_attempt_id).build_part.id\n      if @build_parts_position[build_part_id].nil?\n        @build_parts_position[build_part_id] = position\n      elsif @build_parts_position[build_part_id] > position\n        @build_parts_position[build_part_id] = position\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "app/controllers/concerns/build_attempts_queue_position.rb",
    "content": "module BuildAttemptsQueuePosition\n  extend ActiveSupport::Concern\n\n  # keep_rank is only true if we are calling calculate_build_attempts_position multiple times on the\n  # same build because that build has multiple queues\n  def calculate_build_attempts_position(build_attempts, queue, keep_rank: false)\n    @build_attempts_rank = {} unless keep_rank\n    jobs = Resque.redis.lrange(\"queue:#{queue}\", 0, -1)\n    return if jobs.blank?\n    build_attempts&.each do |build_attempt|\n      next unless build_attempt.state == 'runnable'\n      id = build_attempt.id.to_s\n      @build_attempts_rank[id] = jobs.index { |job| /\"build_attempt_id\\\":#{id}/.match(job) }\n    end\n  end\n\n  def calculate_build_parts_position(build)\n    @build_attempts_rank = {}\n    parts_by_queue = Hash.new([])\n    build_attempts = build.build_attempts.includes(:build_part).where(state: 'runnable')\n    build_attempts.each do |attempt|\n      parts_by_queue[attempt.build_part.queue] += [attempt]\n    end\n\n    parts_by_queue.each do |queue, attempts|\n      calculate_build_attempts_position(attempts, queue, keep_rank: true)\n    end\n  end\nend\n"
  },
  {
    "path": "app/controllers/dashboards_controller.rb",
    "content": "class DashboardsController < ApplicationController\n\n  def build_history_by_worker\n    build_attempts = BuildAttempt.where(\"builder IS NOT NULL\").order('id DESC').limit(params[:count] || 2000).select(:id, :builder, :state)\n\n    @workers = build_attempts.group_by { |ba| ba.builder }\n\n    @partition_jobs = Build.order('id DESC').limit(150).select(:id, :state).includes(:build_parts)\n  end\n\nend\n"
  },
  {
    "path": "app/controllers/pull_requests_controller.rb",
    "content": "require 'remote_server'\n\nclass PullRequestsController < ApplicationController\n  def build\n    if params['payload']\n      # from stash\n      handle_stash_request(JSON.parse(params['payload']))\n    else\n      # from github\n      handle_github_request(params)\n    end\n    render json: {message: \"Thanks!\"}\n  end\n\n  def handle_stash_request(payload)\n    @repo = get_repo(payload['repository']['url'])\n\n    if payload['pull_request'] && active_pull_request?(payload['action'])\n      branch_name = get_branch_name(payload['pull_request']['head']['ref'])\n      sha = payload['pull_request']['head']['sha']\n      handle_pull_request(branch_name, sha)\n    elsif payload['ref']\n      branch_name = get_branch_name(payload['ref'])\n      sha = payload['after']\n      handle_repo_push_request(branch_name, sha)\n    end\n  end\n\n  def handle_github_request(payload)\n    @repo = get_repo(payload['repository']['ssh_url'])\n\n    pull_request = payload['pull_request']\n    if payload['pull_request'] && active_pull_request?(pull_request['state'])\n      branch_name = get_branch_name(pull_request['head']['ref'])\n      sha = pull_request['head']['sha']\n      handle_pull_request(branch_name, sha)\n    elsif payload['ref']\n      branch_name = get_branch_name(payload['ref'])\n      sha = payload['head_commit']['id']\n      handle_repo_push_request(branch_name, sha)\n    end\n  end\n\n  private\n\n  def get_repo(url)\n    Repository.lookup_by_url(url)\n  end\n\n  def handle_repo_push_request(branch_name, sha)\n    return unless @repo\n\n    if @repo.run_ci?\n      branch = fetch_branch(branch_name)\n      branch.kickoff_new_build_unless_currently_busy(sha) if branch.present? && branch.convergence?\n    end\n  end\n\n  def handle_pull_request(branch_name, sha)\n    return unless @repo\n\n    if @repo.build_pull_requests\n      branch = fetch_branch(branch_name, true)\n      build = @repo.ensure_build_exists(sha, branch)\n      branch.abort_in_progress_builds_behind_build(build)\n    end\n  end\n\n  def get_branch_name(ref)\n    ref.sub(%r{\\Arefs/heads/}, '')\n  end\n\n  def fetch_branch(name, auto_create = false)\n    auto_create ? @repo.branches.where(name: name).first_or_create! : @repo.branches.where(name: name).first\n  end\n\n  def active_pull_request?(action)\n    action && action != \"closed\"\n  end\nend\n"
  },
  {
    "path": "app/controllers/repositories_controller.rb",
    "content": "class RepositoriesController < ApplicationController\n\n  def create\n    if params.fetch(:repository)[:url].blank?\n      redirect_to new_repository_path, error: \"Missing required value: Repository URL\"\n      return\n    end\n\n    @repository = Repository.new(repository_params)\n\n    # persist the repository and then create initial Branch records for the\n    # convergence branches\n    if @repository.save && update_convergence_branches\n      redirect_to repository_branches_path(@repository)\n    else\n      @current_convergence_branches = params.fetch(:convergence_branches, \"\").split(',')\n      render template: 'repositories/new'\n    end\n  end\n\n  def new\n    @repository = Repository.new\n    @repository.run_ci = true\n    @current_convergence_branches = ['master']\n  end\n\n  def destroy\n    ActiveRecord::Base.no_touching do\n      Repository.destroy(params[:id])\n    end\n    redirect_to repositories_path\n  end\n\n  def update\n    @repository = Repository.find(params[:id])\n\n    if @repository.update_attributes(repository_params) && update_convergence_branches\n      flash[:message] = \"Settings updated.\"\n      redirect_to repository_edit_url(@repository)\n    else\n      @current_convergence_branches = params.fetch(:convergence_branches, \"\").split(',')\n      render template: 'repositories/edit'\n    end\n  end\n\n  def edit\n    r_namespace, r_name = params[:repository_path].split('/')\n    @repository = Repository.where(namespace: r_namespace, name: r_name).first!\n\n    @current_convergence_branches = @repository.branches.where(convergence: true).select(:name).collect(&:name)\n  end\n\n  def index\n    @repositories = Repository.all\n  end\n\n  def dashboard\n    @branches =\n      Branch.joins(:repository)\n            .includes(:repository)\n            .where(name: 'master')\n            .order('repositories.name')\n            .decorate\n  end\n\n  # build_ref is intended to be used by the Stash webhooks plugin\n  # https://marketplace.atlassian.com/plugins/com.atlassian.stash.plugin.stash-web-post-receive-hooks-plugin\n  def build_ref\n    repository = Repository.find(params[:id])\n\n    # Query string parameters are provided for easy integrations, since it the\n    # simplest to implement.\n    changes = if params[:refChanges]\n                params[:refChanges].map do |change|\n                  [\n                    change[:refId].gsub(/^refs\\/heads\\//, ''),\n                    change[:toHash]\n                  ]\n                end\n              else\n                [params.values_at(:ref, :sha)]\n              end\n\n    result = changes.map do |ref, sha|\n      ensure_build(repository, ref, sha)\n    end\n\n    render json: {\n      builds: result.map { |build|\n        {\n          id:        build.id,\n          build_url: repository_build_url(repository, build)\n        }\n      }\n    }\n  end\n\n  def ensure_build(repository, branch_name, sha)\n    branch = repository.branches.where(name: branch_name).first_or_create!\n\n    build = repository.ensure_build_exists(sha, branch)\n    branch.abort_in_progress_builds_behind_build(build) unless branch.convergence?\n\n    build\n  end\n\n  private\n\n  def repository_params\n    params.require(:repository)\n          .permit(:enabled, :url, :timeout, :build_pull_requests,\n                  :run_ci, :on_green_update, :send_build_success_email,\n                  :send_build_failure_email, :allows_kochiku_merges,\n                  :email_on_first_failure, :send_merge_successful_email,\n                  :assume_lost_after)\n  end\n\n  # update_convergence_branches is called by both create and update. This\n  # method does more work than is necessary for create but it is used to avoid\n  # duplicating code.\n  def update_convergence_branches\n    new_branch_names = params.fetch(:convergence_branches, \"\").split(',').map(&:strip)\n    current_convergence_branches = @repository.branches.where(convergence: true).all\n    current_branch_names = current_convergence_branches.collect(&:name)\n    remove_convergence_from = current_branch_names - new_branch_names\n    add_convergence_to = new_branch_names - current_branch_names\n\n    remove_convergence_from.each do |name|\n      current_convergence_branches.detect { |branch|\n        branch.name == name\n      }.update!(convergence: false)\n    end\n\n    add_convergence_to.each do |name|\n      branch = @repository.branches.where(name: name).first_or_create!\n      branch.update!(convergence: true)\n    end\n    true\n  end\nend\n"
  },
  {
    "path": "app/controllers/status_controller.rb",
    "content": "class StatusController < ApplicationController\n\n  def available\n    if File.exist?(Rails.root.join(\"tmp/maintenance\"))\n      head :service_unavailable\n    else\n      head :ok\n    end\n  end\n\nend\n"
  },
  {
    "path": "app/decorators/branch_decorator.rb",
    "content": "require 'set'\n\nclass BranchDecorator < Draper::Decorator\n  delegate_all\n\n  def most_recent_build_state\n    object.most_recent_build.try(:state) || 'unknown'\n  end\n\n  def last_build_duration\n    object.last_completed_build.try(:elapsed_time)\n  end\n\n  # Recent build timing information grouped by test types.\n  def build_time_history(fuzzy_limit = 1000)\n    result = Hash.new { |hash, key| hash[key] = [] }\n\n    builds = {}\n    build_types = Set.new\n    object.timing_data_for_recent_builds.each do |timing_data|\n      next if timing_data.empty?\n      build_type = timing_data.shift # the type of test that was executed (e.g. cucumber)\n      build_id = timing_data[4] # e.g 65874\n      build_types.add(build_type)\n      builds[build_id] ||= {}\n      builds[build_id][build_type] = timing_data\n    end\n\n    builds.keys.sort.each do |build|\n      build_types.each do |build_type|\n        timing_data = builds[build][build_type] || [] # jquery.flot dislikes missing data\n        result[build_type] << timing_data\n      end\n    end\n\n    result\n  end\nend\n"
  },
  {
    "path": "app/decorators/build_part_decorator.rb",
    "content": "class BuildPartDecorator < Draper::Decorator\n  delegate_all\n\n  def most_recent_stdout_artifact\n    BuildArtifact\n      .joins(:build_attempt => :build_part)\n      .where(\n        'build_attempts.build_part_id' => object.id,\n        'build_attempts.state' => BuildAttempt::COMPLETED_BUILD_STATES\n      ).stdout_log.last\n  end\nend\n"
  },
  {
    "path": "app/helpers/application_helper.rb",
    "content": "module ApplicationHelper\n  def duration_strftime(duration_in_seconds, format = \"%H:%M:%S\")\n    return \"N/A\" if duration_in_seconds.nil? ||\n                    (duration_in_seconds.respond_to?(:nan?) && duration_in_seconds.nan?)\n    (Time.mktime(0) + duration_in_seconds).strftime(format).sub(/^00[ :h]+0?/, \"\")\n  end\n\n  def time_for(time, format = \"%H:%M\")\n    time.strftime(format)\n  end\n\n  def build_success_in_words(build)\n    case build.state\n    when 'succeeded'\n      'success'\n    when 'errored', 'doomed'\n      'failed'\n    else\n      build.state.to_s\n    end\n  end\n\n  def build_activity(build)\n    return \"Unknown\" unless build.is_a?(Build)\n\n    case build.state\n    when 'partitioning', 'runnable', 'running'\n      \"Building\"\n    when 'doomed', 'failed', 'succeeded', 'errored'\n      \"CheckingModifications\"\n    end\n  end\n\n  def link_to_commit(repo, commit_sha)\n    link_to(commit_sha[0, 7], show_link_to_commit(repo, commit_sha))\n  end\n\n  def link_to_branch(build)\n    branch_record = build.branch_record\n    branch_name = branch_record.name\n    link_to(branch_name, branch_record.repository.get_branch_url(branch_name))\n  end\n\n  def show_link_to_commit(repo, commit_sha)\n    repo.remote_server.href_for_commit(commit_sha).to_s\n  end\n\n  def show_link_to_compare(build, first_commit_hash, second_commit_hash)\n    repo = build.repository\n    attrs_from_remote_server = RemoteServer.for_url(repo.url)\n\n    if attrs_from_remote_server.class == RemoteServer::Stash\n      second_commit_hash = repo.on_green_update.blank? ? \"\" : repo.on_green_update.split(',').first\n    end\n    attrs_from_remote_server.url_for_compare(first_commit_hash, second_commit_hash)\n  end\n\n  def show_link_to_create_pull_request(build)\n    build.repository.open_pull_request_url(build.branch_record.name)\n  end\n\n  def timeago(time, options = {})\n    options[:class] ||= \"timeago\"\n    content_tag(:abbr, time.to_s, options.merge(:title => time.getutc.iso8601)) if time\n  end\nend\n"
  },
  {
    "path": "app/helpers/build_helper.rb",
    "content": "module BuildHelper\n  def build_metadata_headers(build, display_ruby_version)\n    headers = []\n    headers << \"Ruby Version\" if display_ruby_version\n\n    if is_a_build_with_one_part?(build)\n      headers << \"Target\"\n    else\n      headers << \"Paths\"\n    end\n    headers\n  end\n\n  def build_metadata_values(build, build_part, display_ruby_version)\n    values = []\n    values << build_part.options[\"ruby\"] if display_ruby_version\n    values << format_paths(build_part)\n    values\n  end\n\n  def format_paths(build_part)\n    if build_part.options['total_workers'] && build_part.options['worker_chunk']\n      build_part.paths.first + \" - Chunk #{build_part.options['worker_chunk']} of #{build_part.options['total_workers']}\"\n    elsif build_part.paths.size == 1\n      if build_part.paths.first == \"/dev/null\"\n        build_part.kind\n      else\n        build_part.paths.first\n      end\n    else\n      first, *rest = build_part.paths\n      first = first.sub(/([^\\/]+)/, '<b class=\"root\">\\1</b>')\n      paths = [first, rest].join(', ')\n      \"#{build_part.paths.length} <span class=\\\"paths\\\" title=\\\"#{build_part.paths.join(', ')}\\\">(#{paths})</span>\".html_safe\n    end\n  end\n\n  def multiple_ruby_versions?(build)\n    build.build_parts.map { |bp| bp.options['ruby'] }.compact.uniq.size > 1\n  end\n\n  def is_a_build_with_one_part?(build)\n    build.build_parts.none? { |build_part| build_part.paths.size > 1 }\n  end\n\n  def eligible_for_merge_on_success?(build)\n    !build.succeeded? && !build.branch_record.convergence? && build.repository.allows_kochiku_merges?\n  end\nend\n"
  },
  {
    "path": "app/helpers/mail_helper.rb",
    "content": "module MailHelper\n  def failed_build_part_sentence(build_part)\n    stdout_log = build_part.most_recent_stdout_artifact\n    str = \"failed after #{build_part.elapsed_time.to_i / 60} minutes\"\n    if stdout_log\n      str += \", for details you can go directly to the #{link_to('stdout', build_artifact_url(stdout_log))} log.\"\n    end\n    str.html_safe\n  end\n\n  def failed_build_paths(build_part)\n    paths = build_part.paths\n\n    str = if build_part.kind.include?('spec')\n            paths.map { |path| path.split('/').last }\n          else\n            paths\n          end\n\n    str.join(', ').truncate(200)\n  end\nend\n"
  },
  {
    "path": "app/helpers/project_stats_helper.rb",
    "content": "module ProjectStatsHelper\n  def pass_rate_css_class(rate)\n    case rate.to_i\n    when 0..39 then 'bad'\n    when 40..75 then 'decent'\n    else 'great'\n    end\n  end\n\n  def rebuild_count_css_class(attempts)\n    case attempts\n    when 0..1 then 'great'\n    when 1..4 then 'decent'\n    else 'bad'\n    end\n  end\n\n  # A string representing the percentage of builds that eventually passed\n  def eventual_pass_rate(builds)\n    pass_rate_text(builds.select(&:succeeded?).size / builds.size.to_f)\n  end\n\n  # A string representing the percentage of the builds that had\n  # all tests pass on the first try.\n  def error_free_pass_rate(builds)\n    error_free_count = builds.to_a.count do |build|\n      build.succeeded? && build.build_parts.all_passed_on_first_try?\n    end\n    total_count = builds.to_a.count(&:completed?)\n    pass_rate_text(error_free_count / total_count.to_f)\n  end\n\n  def pass_rate_text(number)\n    format(\"%1.0f%%\", 100 * number)\n  end\n\n  # Calculates the average number of rebuilds required before builds succeed.\n  # Only considers builds that are successful because builds that are not yet\n  # successful would skew the calculation.\n  def average_number_of_rebuilds(builds)\n    successful_builds = builds.select(&:succeeded?)\n    total_build_parts, total_build_attempts = 0, 0\n\n    successful_builds.each do |build|\n      total_build_attempts += build.build_attempts.count\n      total_build_parts += build.build_parts.count\n    end\n\n    (total_build_attempts - total_build_parts) / successful_builds.size.to_f\n  end\n\n  def median_elapsed_time(builds)\n    successful_builds = builds.select(&:succeeded?)\n    elapsed_times = successful_builds.map { |build| build.elapsed_time || 0 }\n    times = elapsed_times.length\n    if times.zero?\n      nil\n    else\n      elapsed_times.sort!\n      (elapsed_times[(times - 1) / 2] + elapsed_times[times / 2]) / 2.0\n    end\n  end\n\n  def seconds_to_minutes(seconds)\n    (seconds / 60).round if seconds.is_a?(Numeric)\n  end\nend\n"
  },
  {
    "path": "app/jobs/build_attempt_job.rb",
    "content": "require 'job_base'\n\n# Keep this interface so we can easily enqueue new jobs.\n# The job is handled by kochiku-worker\nclass BuildAttemptJob < JobBase\n  class WrongBuildAttemptJobClassError < StandardError; end\n\n  def initialize(build_options)\n    raise WrongBuildAttemptJobClassError, \"BuildAttemptJob was processed by the BuildAttemptJob shim in Kochiku instead of real class in Kochiku-worker.\"\n  end\n\n  def perform\n  end\nend\n"
  },
  {
    "path": "app/jobs/build_initiated_by_job.rb",
    "content": "require 'job_base'\nrequire 'git_repo'\n\nclass BuildInitiatedByJob < JobBase\n  extend Resque::Plugins::Retry\n  @queue = :low\n\n  @retry_limit = 5\n  @retry_exceptions = {GitRepo::RefNotFoundError => [60, 60, 60, 180, 360],\n                       Cocaine::ExitStatusError => [30, 60, 60, 60, 60] }\n\n  def initialize(build_id)\n    @build = Build.find(build_id)\n  end\n\n  def perform\n    return if @build.initiated_by\n    email = GitBlame.last_email_in_branch(@build).first\n    if email.present?\n      @build.update_attributes(initiated_by: email)\n    end\n  end\nend\n"
  },
  {
    "path": "app/jobs/build_partitioning_job.rb",
    "content": "require 'job_base'\nrequire 'git_repo'\nrequire 'partitioner'\n\nclass BuildPartitioningJob < JobBase\n  extend Resque::Plugins::Retry\n  @queue = :partition\n\n  @retry_limit = 5\n  @retry_exceptions = {GitRepo::RefNotFoundError => [60, 60, 60, 180, 360],\n                       Cocaine::ExitStatusError => [30, 60, 60, 60, 60] }\n\n  def initialize(build_id)\n    @build = Build.find(build_id)\n  end\n\n  def perform\n    if @build.test_command.blank?\n      error_message = \"No test_command specified in kochiku.yml.\"\n      @build.update!(:error_details => { :message => error_message, :backtrace => nil }, :state => 'errored')\n    else\n      partitioner = Partitioner.for_build(@build)\n      parts = partitioner.partitions\n      if parts.empty? && partitioner.partitioner_type == \"Go\"\n        @build.update!(:state => 'succeeded')\n      else\n        @build.partition(parts)\n      end\n    end\n    @build.update_commit_status!\n  end\n\n  def on_exception(e)\n    if self.class.retry_exception?(e) && !self.class.retry_limit_reached?\n      @build.update_attributes!(:state => :waiting_for_sync)\n    else\n      @build.update_attributes!(\n        :state => 'errored',\n        :error_details => { :message => e.to_s, :backtrace => e.backtrace.join(\"\\n\") }\n      )\n      @build.update_commit_status!\n    end\n    super\n  end\nend\n"
  },
  {
    "path": "app/jobs/build_state_update_job.rb",
    "content": "require 'job_base'\nrequire 'git_repo'\nrequire 'github_commit_status'\n\n# this job updates the remote repo. it is enqueued when a build's state changes.\nclass BuildStateUpdateJob < JobBase\n  @queue = :high\n\n  def initialize(build_id)\n    @build_id = build_id\n  end\n\n  def perform\n    build = Build.find(@build_id)\n\n    # notify github/stash that the build status has changed\n    build.update_commit_status!\n\n    # trigger another build for this branch if there is unbuilt commits\n    if build.branch_record.convergence? && build.completed?\n      sha = build.repository.sha_for_branch(build.branch_record.name)\n      build.branch_record.kickoff_new_build_unless_currently_busy(sha)\n    end\n\n    build.send_build_status_email!\n\n    if build.succeeded?\n      if !build.on_success_script_log_file.present? && build.on_success_script.present?\n        BuildStrategy.run_success_script(build)\n      end\n    end\n\n    if build.promotable?\n      build.promote!\n    elsif build.merge_on_success_enabled?\n      if build.mergable_by_kochiku?\n        # ACHTUNG merge to master isn't right anymore. This part my have been changed by shenil\n        build.merge_to_master!\n      else\n        Rails.logger.warn(\"Build #{build.id} has merge_on_success enabled but cannot be merged.\")\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "app/jobs/enforce_timeouts_job.rb",
    "content": "# The EnforceTimeoutsJob searches for BuildAttempts that were picked up by a\n# kochiku worker but never heard back from again. It compares (Time.now -\n# started_at) against the timeout value of the repository. If the maximum time\n# has elapsed, it will mark the BuildAttempt as errored and kick off a rebuild.\nclass EnforceTimeoutsJob\n  def self.perform\n    # The EnforceTimeoutsJob runs frequently so we do not check BuildAttempts greater than 1 day old\n    BuildAttempt.where(\"created_at > ? AND state = 'running' AND started_at IS NOT NULL\", 1.day.ago).each do |attempt|\n      lenient_timeout = attempt.build_instance.repository.timeout + 5\n      if attempt.elapsed_time > lenient_timeout.minutes\n        # Error artifact creation taken from kochiku-worker\n        message = StringIO.new\n        message.puts(\"This BuildAttempt has not been updated by its worker,\\n\" \\\n                     \"and has been running longer then the timeout so it has\\n\" \\\n                     \"been considered lost by Kochiku.\")\n        message.rewind\n        def message.path\n          'error.txt'\n        end\n\n        BuildArtifact.create(:build_attempt_id => attempt.id, :log_file => message)\n        attempt.update!(state: 'errored', finished_at: Time.current)\n        Rails.logger.error \"Errored BuildAttempt:#{attempt.id} due to timeout\"\n\n        # Enqueue another BuildAttempt if this is the most recent attempt for the BuildPart\n        part = attempt.build_part\n        part.rebuild! if part.build_attempts.last == attempt\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "app/jobs/job_base.rb",
    "content": "class JobBase\n  class << self\n    def enqueue(*args)\n      Resque.enqueue(self, *args)\n    end\n\n    def enqueue_on(build_queue, *args)\n      Resque::Job.create(build_queue, self, *args)\n      Resque::Plugin.after_enqueue_hooks(self).each do |hook|\n        klass.send(hook, *args)\n      end\n    end\n\n    def perform(*args)\n      job = new(*args)\n      job.perform\n    rescue => e\n      if job\n        job.on_exception(e)\n      else\n        raise e\n      end\n    end\n  end\n\n  def on_exception(e)\n    raise e\n  end\nend\n"
  },
  {
    "path": "app/jobs/poll_repositories_job.rb",
    "content": "class PollRepositoriesJob\n  def self.perform\n    Repository.where(enabled: true).find_each(batch_size: 10) do |repo|\n      branch = repo.convergence_branches.first || repo.branches.where(name: 'master').first\n\n      if branch.nil?\n        Rails.logger.warn(\"[PollRepositoriesJob] Could not find a branch to check for repo #{repo.name_with_namespace}\")\n      end\n\n      begin\n        head = repo.sha_for_branch(branch.name)\n      rescue RemoteServer::AccessDenied, RemoteServer::RefDoesNotExist, Zlib::BufError => e\n        Rails.logger.error(\"[PollRepositoriesJob] Exception #{e} occurred for repo #{repo.id}:#{repo.name_with_namespace}. Automatically setting the repository to disabled.\")\n        repo.update!(enabled: false)\n        next\n      end\n\n      unless repo.build_for_commit(head)\n        branch.builds.create!(ref: head, state: 'partitioning')\n        Rails.logger.info \"Build created for #{repo.namespace}/#{repo.name}:#{branch.name} at #{head}\"\n      end\n\n      sleep 0.5 # take a breath\n    end\n  end\nend\n"
  },
  {
    "path": "app/jobs/timeout_stuck_builds_job.rb",
    "content": "class TimeoutStuckBuildsJob < JobBase\n  @queue = :high\n\n  def self.perform\n    clean_lost_builds\n    clean_runnable_not_queued\n  end\n\n  def self.clean_runnable_not_queued\n    # check for builds in runnable that are no longer in the queue\n    missing = []\n    BuildAttempt.select(\"build_attempts.id\", \" build_parts.queue as queue\").joins(:build_part)\n                .where(\"build_attempts.state = 'runnable' AND build_attempts.created_at < ? AND build_attempts.created_at > ?\", 5.minutes.ago, 1.day.ago)\n                .group_by(&:queue)\n                .each do |queue, attempts|\n                  current_queue = Resque.redis.lrange(\"queue:#{queue}\", 0, -1).to_s\n                  missing += attempts.reject { |attempt| current_queue.match(/build_attempt_id\\\\*\\\"\\:#{attempt.id}[^0-9]/) }\n                end\n\n    missing.select! { |build_attempt_partial| BuildAttempt.find(build_attempt_partial.id).state == 'runnable' }\n    missing.each { |build_attempt_partial| BuildAttempt.find(build_attempt_partial.id).finish!('errored') }\n  end\n\n  def self.clean_lost_builds\n    # check for builds that have hit their assume_lost_after\n    Repository.where(\"assume_lost_after IS NOT NULL\").find_each do |repo|\n      repo.build_attempts.where(\"build_attempts.state = 'running' AND build_attempts.started_at < ?\", repo.assume_lost_after.minutes.ago).each do |build_attempt|\n        build_attempt.finish!('errored')\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "app/mailers/build_mailer.rb",
    "content": "class BuildMailer < ActionMailer::Base\n  helper :application, :mail\n\n  default :from => Proc.new { Settings.sender_email_address }\n\n  private\n\n  def pull_request_link(build)\n    @build = build\n\n    remote_server = @build.repository.remote_server\n    if remote_server.class == RemoteServer::Stash && !@build.branch_record.convergence?\n      begin\n        id, _ = remote_server.get_pr_id_and_version(@build.branch_record.name)\n        return \"#{remote_server.base_html_url}/pull-requests/#{id}/overview\"\n      rescue RemoteServer::StashAPIError\n        # not all branches will have an open pull request\n        return nil\n      end\n    end\n    nil\n  end\n\n  public\n\n  def error_email(build_attempt, error_text = nil)\n    @build_part = build_attempt.build_part\n    @builder = build_attempt.builder\n    @error_text = error_text\n    mail :to => Settings.kochiku_notifications_email_address,\n         :subject => \"[kochiku] Build part errored on #{@builder}\",\n         :from => Settings.sender_email_address\n  end\n\n  def build_break_email(build)\n    @build = build\n\n    # Allow the partitioner to be selective about who is emailed\n    partitioner = Partitioner.for_build(@build)\n    @responsible_email_and_files = partitioner.emails_for_commits_causing_failures\n    @emails = @responsible_email_and_files.keys\n    if @emails.empty?\n      @emails = if @build.branch_record.convergence?\n                  GitBlame.emails_since_last_green(@build)\n                else\n                  GitBlame.emails_in_branch(@build)\n                end\n    end\n\n    @git_changes = if @build.branch_record.convergence?\n                     GitBlame.changes_since_last_green(@build)\n                   else\n                     GitBlame.changes_in_branch(@build)\n                   end\n\n    @failed_build_parts = @build.build_parts.failed_or_errored.decorate\n    @pr_link = pull_request_link(build)\n\n    mail :to => @emails,\n         :bcc => 
Settings.kochiku_notifications_email_address,\n         :subject => \"[kochiku] Failure - #{@build.branch_record.name} build for #{@build.repository.name}\",\n         :from => Settings.sender_email_address\n  end\n\n  def build_success_email(build)\n    @build = build\n    @email = GitBlame.last_email_in_branch(@build)\n    @git_changes = GitBlame.changes_in_branch(@build)\n    @pr_link = pull_request_link(build)\n\n    mail :to => @email,\n         :bcc => Settings.kochiku_notifications_email_address,\n         :subject => \"[kochiku] Success - #{@build.branch_record.name} build for #{@build.repository.name}\",\n         :from => Settings.sender_email_address\n  end\nend\n"
  },
  {
    "path": "app/mailers/merge_mailer.rb",
    "content": "class MergeMailer < ActionMailer::Base\n  helper :application\n\n  default :from => Proc.new { Settings.sender_email_address }\n\n  def merge_successful(build, merge_commit, emails, stdout_and_stderr)\n    @build = build\n    @merge_commit = merge_commit\n    @stdout_and_stderr = stdout_and_stderr\n\n    mail(:to => emails,\n         :bcc => Settings.kochiku_notifications_email_address,\n         :subject => \"[kochiku] Merged #{@build.branch_record.name} branch for #{@build.repository.name}\")\n  end\n\n  def merge_failed(build, emails, stdout_and_stderr)\n    @build = build\n    @stdout_and_stderr = stdout_and_stderr\n    mail(:to => emails,\n         :bcc => Settings.kochiku_notifications_email_address,\n         :subject => \"[kochiku] Failed to merge #{@build.branch_record.name} branch for #{@build.repository.name}\")\n  end\nend\n"
  },
  {
    "path": "app/models/branch.rb",
    "content": "class Branch < ActiveRecord::Base\n  belongs_to :repository\n  has_many :builds, :dependent => :destroy, :inverse_of => :branch_record\n\n  validates :name, :presence => true\n\n  def to_param\n    self.name\n  end\n\n  def kickoff_new_build_unless_currently_busy(ref)\n    last_build = builds.last\n    if last_build && !last_build.completed?\n      last_build\n    else\n      builds.create_with(state: 'partitioning').find_or_create_by!(ref: ref)\n    end\n  end\n\n  def abort_in_progress_builds_behind_build(current_build)\n    builds.where(state: Build::IN_PROGRESS_STATES).readonly(false)\n          .reject { |build| build.id >= current_build.id }\n          .each { |build| build.abort! }\n  end\n\n  def most_recent_build\n    @most_recent_build ||= builds.last\n  end\n\n  def last_completed_build\n    @last_completed_build ||= builds.completed.last\n  end\n\n  # The fuzzy_limit is used to set a upper bound on the amount of time that the\n  # sql query will take\n  def timing_data_for_recent_builds(fuzzy_limit = 1000)\n    id_cutoff = builds.maximum(:id).to_i - fuzzy_limit\n\n    self.class.connection.execute(build_time_history_sql(id_cutoff))\n  end\n\n  private\n\n  def build_time_history_sql(min_build_id)\n    return <<-SQL\n      SELECT build_parts.kind AS kind,\n             SUBSTR(builds.ref, 1, 5) AS ref,\n             IFNULL(FLOOR(ROUND(MAX(UNIX_TIMESTAMP(build_attempts.finished_at) - UNIX_TIMESTAMP(build_attempts.started_at)) / 60)), 0) AS max,\n             IFNULL(FLOOR(ROUND(MAX(UNIX_TIMESTAMP(build_attempts.finished_at) - UNIX_TIMESTAMP(build_attempts.started_at)) / 60)) - FLOOR(ROUND(MIN(UNIX_TIMESTAMP(build_attempts.finished_at) - UNIX_TIMESTAMP(build_attempts.started_at)) / 60)), 0) AS min_diff,\n             0 AS max_diff,\n             builds.id,\n             builds.state,\n             builds.created_at\n        FROM builds\n   LEFT JOIN build_parts ON build_parts.build_id = builds.id\n   LEFT JOIN build_attempts ON 
build_attempts.build_part_id = build_parts.id\n       WHERE builds.branch_id = #{id}\n         AND builds.id >= #{min_build_id}\n         AND (build_attempts.id IS NULL OR build_attempts.id = (\n               SELECT id\n                 FROM build_attempts\n                WHERE build_part_id = build_parts.id\n             ORDER BY id DESC\n                LIMIT 1\n             ))\n    GROUP BY builds.id, build_parts.kind, builds.state, builds.created_at\n    SQL\n  end\nend\n"
  },
  {
    "path": "app/models/build.rb",
    "content": "require 'on_success_uploader'\nrequire 'fileless_io'\nrequire 'build_partitioning_job'\nrequire 'build_initiated_by_job'\n\nclass Build < ActiveRecord::Base\n  # using 'branch_record' instead of 'branch' because Build has a legacy 'branch' string type column. The legacy column will be removed soon.\n  belongs_to :branch_record, :class_name => \"Branch\", :foreign_key => \"branch_id\", :inverse_of => :builds, :touch => true\n  has_one :repository, :through => :branch_record\n  has_many :build_parts, :dependent => :destroy, :inverse_of => :build_instance do\n    def not_passed_and_last_attempt_in_state(*state)\n      joins(:build_attempts).joins(<<-EOSQL).where(\"build_attempts.state\" => state, \"passed_attempt.id\" => nil, \"newer_attempt.id\" => nil)\n        LEFT JOIN build_attempts\n          AS passed_attempt\n          ON build_attempts.build_part_id = passed_attempt.build_part_id\n            AND passed_attempt.state = 'passed'\n        LEFT JOIN build_attempts\n          AS newer_attempt\n          ON build_attempts.build_part_id = newer_attempt.build_part_id\n            AND newer_attempt.id > build_attempts.id\n      EOSQL\n    end\n\n    def passed\n      joins(:build_attempts).where(\"build_attempts.state\" => 'passed').group(\"build_parts.id\")\n    end\n\n    def failed\n      not_passed_and_last_attempt_in_state('failed')\n    end\n\n    def failed_or_errored\n      not_passed_and_last_attempt_in_state('failed', 'errored')\n    end\n\n    def failed_errored_or_aborted\n      not_passed_and_last_attempt_in_state('failed', 'errored', 'aborted')\n    end\n\n    def errored\n      not_passed_and_last_attempt_in_state('errored')\n    end\n\n    def all_passed_on_first_try?\n      successful_build_attempts = joins(:build_attempts).where(\"build_attempts.state\" => 'passed').count\n      unsuccessful_build_attempts = joins(:build_attempts).where(\"build_attempts.state != ?\", 'passed').count\n      successful_build_attempts > 0 && 
unsuccessful_build_attempts == 0\n    end\n  end\n  has_many :build_attempts, :through => :build_parts\n  TERMINAL_STATES = %w[failed succeeded errored aborted].freeze\n  FAILED_STATES = %w[failed errored doomed].freeze\n  IN_PROGRESS_STATES = %w[waiting_for_sync partitioning runnable running doomed].freeze\n  STATES = IN_PROGRESS_STATES + TERMINAL_STATES\n  validates :state, inclusion: { in: STATES }\n  serialize :error_details, Hash\n  serialize :kochiku_yml_config, Hash\n\n  validates :branch_id, presence: true\n  validates :ref, presence: true,\n                  length: { is: 40, allow_blank: true },\n                  uniqueness: { scope: :branch_id, allow_blank: true }\n\n  mount_uploader :on_success_script_log_file, OnSuccessUploader\n\n  after_commit :enqueue_partitioning_job, :on => :create\n  after_commit :enqueue_initiated_by, on: :create\n\n  scope :completed, -> { where(state: TERMINAL_STATES) }\n\n  def test_command\n    tc = self[:test_command]\n    if tc.nil?\n      tc = (kochiku_yml && kochiku_yml.key?('test_command')) ? kochiku_yml['test_command'] : repository.test_command\n      self.update_attributes(test_command: tc)\n    end\n    tc\n  end\n\n  def on_success_script\n    (kochiku_yml && kochiku_yml.key?('on_success_script')) ? 
kochiku_yml['on_success_script'] : nil\n  end\n\n  def previous_build\n    branch_record.builds.where(\"id < ?\", self.id).order(\"id DESC\").first\n  end\n\n  def previous_successful_build\n    Build.where(branch_id: self.branch_id, state: 'succeeded').where(\"id < ?\", self.id).order(\"id DESC\").first\n  end\n\n  def enqueue_partitioning_job\n    Resque.enqueue(BuildPartitioningJob, self.id) if repository.enabled?\n  end\n\n  def kochiku_yml\n    if @kochiku_yml.nil?\n      # try to load the kochiku.yml info from the build's repo, if it is not in the DB already\n      update_attributes!(kochiku_yml_config: GitRepo.load_kochiku_yml(repository, ref)) if kochiku_yml_config.empty?\n\n      # if there's actually no kochiku.yml file for the build, the kochiku_yml_config\n      # attribute would still be empty, even after the above update.\n      @kochiku_yml = kochiku_yml_config.empty? ? false : kochiku_yml_config\n    else\n      @kochiku_yml\n    end\n  end\n\n  def partition(parts)\n    return unless repository.enabled?\n    transaction do\n      update_attributes!(:state => 'runnable')\n      parts.each do |part|\n        build_parts.create!(:kind => part['type'],\n                            :paths => part['files'],\n                            :queue => part['queue'],\n                            :retry_count => part['retry_count'],\n                            :options => part['options'])\n      end\n    end\n\n    build_parts.each { |build_part| build_part.create_and_enqueue_new_build_attempt! 
}\n  end\n\n  def update_state_from_parts!\n    return if build_parts.empty?\n\n    errored = build_parts.errored\n    passed = build_parts.passed\n    failed = build_parts.failed\n    next_state = case\n                 when (build_parts - passed).empty?\n                   'succeeded'\n                 when self.state == 'aborted'\n                   'aborted'\n                 when errored.any?\n                   'errored'\n                 when (passed | failed).count == build_parts.count\n                   'failed'\n                 else\n                   failed.empty? ? 'running' : 'doomed'\n                 end\n\n    previous_state = self.state\n    update_attributes!(:state => next_state) unless previous_state == next_state\n    [previous_state, next_state]\n  end\n\n  def update_commit_status!\n    repository.remote_server.update_commit_status!(self)\n  end\n\n  # As implemented, finished_at will return the wrong value if there is a\n  # unsuccessful attempt following a successful one. 
Left this way for\n  # performance and simplicity.\n  def finished_at\n    build_attempts.maximum(:finished_at)\n  end\n\n  def elapsed_time\n    last_finished_at = finished_at\n    return nil if last_finished_at.blank?\n    last_finished_at - created_at\n  end\n\n  def linear_time\n    build_parts.inject(0) do |sum, part|\n      sum + (part.elapsed_time || 0)\n    end\n  end\n\n  def retry_count\n    build_parts.sum(0) do |part|\n      part.build_attempts.count - 1\n    end\n  end\n\n  def max_retries\n    build_parts.max_by { |part| part.build_attempts.count }.build_attempts.count - 1\n  end\n\n  # This can be used as `building_time` under the assumption that\n  # all parts executed in parallel.\n  def longest_build_part\n    build_parts.max_by { |part| part.elapsed_time || 0 }.elapsed_time\n  end\n\n  def idle_time\n    (elapsed_time || 0) - (longest_build_part || 0)\n  end\n\n  def succeeded?\n    state == 'succeeded'\n  end\n\n  def failed?\n    FAILED_STATES.include?(state)\n  end\n\n  # has a build part with failed attempts but no successful ones yet\n  def already_failed?\n    build_parts.any? { |part| part.build_attempts.unsuccessful.exists? && !part.build_attempts.where(state: 'passed').exists? }\n  end\n\n  def aborted?\n    state == 'aborted'\n  end\n\n  def promotable?\n    succeeded? && branch_record.convergence?\n  end\n\n  def mergable_by_kochiku?\n    succeeded? && merge_on_success_enabled? && repository.allows_kochiku_merges? && !newer_branch_build_exists?\n  end\n\n  def merge_on_success_enabled?\n    !branch_record.convergence? 
&& self.merge_on_success\n  end\n\n  def newer_branch_build_exists?\n    most_recent_build = branch_record.most_recent_build\n    most_recent_build.id != self.id\n  end\n\n  def merge_to_master!\n    BuildStrategy.merge_ref(self)\n  end\n\n  def promote!\n    unless promoted?\n      BuildStrategy.promote_build(self)\n      update!(promoted: true)\n    end\n  end\n\n  def completed?\n    TERMINAL_STATES.include?(state)\n  end\n\n  # Changes the build state to 'aborted'. Sets merge_on_success to false to\n  # protect against accidental merges. Updates the state of all of the build's\n  # 'runnable' build_parts to be 'aborted'.\n  def abort!\n    update!(state: 'aborted', merge_on_success: false)\n\n    BuildAttempt\n      .joins(:build_part)\n      .where(:state => 'runnable', 'build_parts.build_id' => self.id)\n      .update_all(state: 'aborted', updated_at: Time.current)\n  end\n\n  def to_color\n    case state\n    when 'succeeded'\n      :green\n    when 'failed', 'errored', 'aborted', 'doomed'\n      :red\n    else\n      :blue\n    end\n  end\n\n  def to_png\n    case to_color\n    when :green\n      status_png(179, 247, 110)\n    when :red\n      status_png(247, 110, 110)\n    when :blue\n      status_png(110, 165, 247)\n    end\n  end\n\n  def send_build_status_email!\n    return if branch_record.convergence? && !previous_successful_build\n\n    if completed?\n      if failed? && !build_failure_email_sent? && repository.send_build_failure_email?\n        unless build_failure_email_sent?\n          BuildMailer.build_break_email(self).deliver_now\n          update(build_failure_email_sent: true)\n        end\n      elsif succeeded? && !branch_record.convergence? && !build_success_email_sent? && repository.send_build_success_email?\n        BuildMailer.build_success_email(self).deliver_now\n        update(build_success_email_sent: true)\n      end\n    elsif !branch_record.convergence? && repository.email_on_first_failure && already_failed? 
&& repository.send_build_failure_email?\n      unless build_failure_email_sent?\n        # due to race condition, update attribute before sending email\n        update(build_failure_email_sent: true)\n        BuildMailer.build_break_email(self).deliver_now\n      end\n    end\n  end\n\n  def is_running?\n    IN_PROGRESS_STATES.include?(self.state)\n  end\n\n  def as_json(options = {})\n    # exclude test_command by default\n    options[:except] ||= [:test_command]\n    super(options.reverse_merge(methods: :elapsed_time))\n  end\n\n  private\n\n  def enqueue_initiated_by\n    Resque.enqueue(BuildInitiatedByJob, self.id)\n  end\n\n  def status_png(r, g, b)\n    ChunkyPNG::Canvas.new(13, 13, ChunkyPNG::Color::TRANSPARENT)\n                     .circle(6, 6, 5, ChunkyPNG::Color::BLACK, ChunkyPNG::Color.rgb(r, g, b))\n  end\nend\n"
  },
  {
    "path": "app/models/build_artifact.rb",
    "content": "require 'log_file_uploader'\n\nclass BuildArtifact < ActiveRecord::Base\n  belongs_to :build_attempt, :inverse_of => :build_artifacts, :touch => true\n  mount_uploader :log_file, LogFileUploader\n  skip_callback :commit, :after, :remove_log_file!\n  validates :log_file, presence: true\n\n  scope :stdout_log, -> { where(:log_file => ['stdout.log.gz', 'stdout.log']) }\n  scope :error_txt, -> { where(:log_file => 'error.txt') }\n\n  def as_json\n    super(except: \"log_file\").tap do |hash|\n      log_file = {\"url\" => Rails.application.routes.url_helpers.build_artifact_path(self), \"name\" => self.log_file.path}\n      hash[\"build_artifact\"][\"log_file\"] = log_file\n    end\n  end\nend\n"
  },
  {
    "path": "app/models/build_attempt.rb",
    "content": "class BuildAttempt < ActiveRecord::Base\n  has_many :build_artifacts, :dependent => :destroy, :inverse_of => :build_attempt\n  belongs_to :build_part, :inverse_of => :build_attempts, :touch => true\n  has_one :build_instance, through: :build_part\n\n  FAILED_BUILD_STATES = %w[failed errored].freeze\n  COMPLETED_BUILD_STATES = %w[passed aborted] + FAILED_BUILD_STATES\n  IN_PROGRESS_BUILD_STATES = %w[runnable running].freeze\n  STATES = IN_PROGRESS_BUILD_STATES + COMPLETED_BUILD_STATES\n\n  validates :state, inclusion: { in: STATES }\n\n  scope :unsuccessful, -> { where(state: FAILED_BUILD_STATES) }\n\n  def elapsed_time\n    if finished_at && started_at\n      finished_at - started_at\n    elsif started_at\n      Time.current - started_at\n    end\n  end\n\n  def start!(builder)\n    return false unless update_attributes(:state => 'running', :started_at => Time.current, :builder => builder)\n\n    build = build_part.build_instance\n    previous_state, new_state = build.update_state_from_parts!\n\n    if previous_state == new_state\n      # bump build's update_at because update_state_from_parts did not alter the build record\n      build.touch\n    end\n\n    if previous_state != new_state\n      Rails.logger.info(\"Build #{build.id} state is now #{build.state}\")\n      BuildStateUpdateJob.enqueue(build.id)\n    end\n\n    true\n  end\n\n  def finish!(state)\n    return false unless update_attributes(:state => state, :finished_at => Time.current)\n\n    if should_reattempt?\n      # Will only send email if email_on_first_failure is enabled.\n      build_part.build_instance.send_build_status_email!\n      build_part.rebuild!\n    elsif state == 'errored'\n      BuildMailer.error_email(self, error_txt).deliver_now\n    end\n\n    build = build_part.build_instance\n\n    previous_state, new_state = build.update_state_from_parts!\n\n    if previous_state == new_state\n      # bump build's update_at because update_state_from_parts did not alter the build 
record\n      build.touch\n    end\n\n    if previous_state != new_state\n      Rails.logger.info(\"Build #{build.id} state is now #{build.state}\")\n      BuildStateUpdateJob.enqueue(build.id)\n    end\n\n    true\n  end\n\n  def unsuccessful?\n    FAILED_BUILD_STATES.include?(state)\n  end\n\n  def successful?\n    state == 'passed'\n  end\n\n  def aborted?\n    state == 'aborted'\n  end\n\n  def running?\n    state == 'running'\n  end\n\n  def stopped?\n    COMPLETED_BUILD_STATES.include?(state)\n  end\n\n  def errored?\n    state == 'errored'\n  end\n\n  def should_reattempt?\n    unsuccessful? && build_part.should_reattempt?\n  end\n\n  def error_txt\n    error_artifact = build_artifacts.error_txt.first\n    error_artifact.log_file.read if error_artifact\n  end\n\n  def files\n    build_artifacts.as_json\n  end\nend\n"
  },
  {
    "path": "app/models/build_part.rb",
    "content": "class BuildPart < ActiveRecord::Base\n  # using 'build_instance' instead of 'build' because AR defines `build` for associations, and it wins\n  belongs_to :build_instance, :class_name => \"Build\", :foreign_key => \"build_id\", :inverse_of => :build_parts\n  has_many :build_attempts, :dependent => :destroy, :inverse_of => :build_part\n  validates :kind, :paths, :queue, presence: true\n\n  serialize :paths, Array\n  serialize :options, Hash\n\n  def last_attempt\n    build_attempts.last\n  end\n\n  def create_and_enqueue_new_build_attempt!\n    build_attempt = build_attempts.create!(:state => 'runnable')\n    BuildAttemptJob.enqueue_on(queue.to_s, job_args(build_attempt))\n    build_instance.touch # invalidate the cache of builds#show\n    build_attempt\n  rescue GitRepo::RefNotFoundError\n    # delete the dud build_attempt and re-raise\n    build_attempt.destroy if build_attempt\n\n    raise\n  end\n  alias rebuild! create_and_enqueue_new_build_attempt!\n\n  def job_args(build_attempt)\n    repository = build_instance.repository\n    {\n      \"build_attempt_id\" => build_attempt.id,\n      \"build_kind\" => kind,\n      \"build_ref\" => build_instance.ref,\n      \"branch\" => build_instance.branch_record.name,\n      \"test_files\" => paths,\n      \"repo_name\" => \"#{repository.name}-cache\",  # need to pass -cache for now for compatibility with current kochiku-worker\n      \"test_command\" => build_instance.test_command,\n      \"repo_url\" => repository.url_for_fetching,\n      \"remote_name\" => \"origin\",\n      \"timeout\" => repository.timeout.minutes,\n      \"options\" => options,\n      \"kochiku_env\" => Rails.env,\n    }\n  end\n\n  def status\n    if successful?\n      'passed'\n    else\n      last_attempt.try(:state) || 'unknown'\n    end\n  end\n\n  def successful?\n    build_attempts.any?(&:successful?)\n  end\n\n  def unsuccessful?\n    !successful?\n  end\n\n  def running?\n    started_at && !finished_at\n  end\n\n  def 
not_finished?\n    !finished_at\n  end\n\n  def to_color\n    case status\n    when 'passed'\n      :green\n    when 'failed', 'errored', 'aborted'\n      :red\n    else\n      :blue\n    end\n  end\n\n  def started_at\n    last_attempt.try(:started_at)\n  end\n\n  def finished_at\n    last_attempt.try(:finished_at)\n  end\n\n  def elapsed_time\n    if finished_at && started_at\n      finished_at - started_at\n    elsif started_at\n      Time.current - started_at\n    end\n  end\n\n  def as_json(options = {})\n    super(options.reverse_merge(methods: :status))\n  end\n\n  def should_reattempt?\n    if successful?\n      false\n    elsif (build_attempts.unsuccessful.count - 1) < retry_count\n      true\n    # automatically retry build parts that errored in less than 60 seconds\n    elsif elapsed_time && elapsed_time < 60 && last_attempt.errored? &&\n          build_attempts.unsuccessful.count < 5\n      true\n    else\n      false\n    end\n  end\nend\n"
  },
  {
    "path": "app/models/repository.rb",
    "content": "require 'remote_server'\n\n# This Repository class should only concern itself with persisting and acting on\n# Repository records in the database. All non-database operations should go\n# through the RemoteServer classes.\nclass Repository < ActiveRecord::Base\n  has_many :branches, :dependent => :destroy\n  has_many :convergence_branches, -> { where(convergence: true) }, class_name: \"Branch\"\n  has_many :builds, through: :branches\n  has_many :build_parts, through: :builds\n  has_many :build_attempts, through: :build_parts\n  validates :host, :name, :url, presence: true\n  validates :name, uniqueness: { scope: :namespace, message: \"^Namespace + Name combination already exists\",\n                                 case_sensitive: false }\n  validates :timeout, numericality: { :only_integer => true }\n  validates :timeout, inclusion: { in: 0..1440, message: 'The maximum timeout allowed is 1440 minutes' }\n  validates :assume_lost_after, numericality: { :only_integer => true }, :allow_nil => true\n  validates :url, uniqueness: true, allow_blank: true\n  validate :validate_url_against_remote_servers\n\n  def self.lookup_by_url(url)\n    remote_server = RemoteServer.for_url(url)\n    repository_namespace = remote_server.attributes.fetch(:repository_namespace)\n    repository_name = remote_server.attributes.fetch(:repository_name)\n    repository_host_and_aliases = remote_server.attributes.fetch(:possible_hosts)\n\n    Repository.find_by(host: repository_host_and_aliases,\n                       namespace: repository_namespace,\n                       name: repository_name)\n  end\n\n  def self.lookup(host:, namespace:, name:)\n    git_server_settings = Settings.git_server(host)\n\n    Repository.find_by(host: [git_server_settings.host, *git_server_settings.aliases].compact,\n                       namespace: namespace,\n                       name: name)\n  end\n\n  # Setting a URL will extract values for host, namespace, and name. 
This\n  # should not overwrite values for those attributes that were set in the same\n  # session.\n  def url=(value)\n    # this column is deprecated; eventually url will just be a virtual attribute\n    self[:url] = value\n\n    return unless RemoteServer.parseable_url?(value)\n    return unless RemoteServer.valid_git_host?(value)\n\n    attrs_from_remote_server = RemoteServer.for_url(value).attributes\n    self.host = attrs_from_remote_server[:host] unless host_changed?\n    self.namespace = attrs_from_remote_server[:repository_namespace] unless namespace_changed?\n    self.name = attrs_from_remote_server[:repository_name] unless name_changed?\n  end\n\n  def remote_server\n    @remote_server ||= RemoteServer.for_url(url)\n  end\n\n  delegate :base_html_url, :base_api_url, :sha_for_branch, :url_for_fetching, :get_branch_url, :open_pull_request_url, to: :remote_server\n\n  def promotion_refs\n    on_green_update.split(\",\").map(&:strip).reject(&:blank?)\n  end\n\n  def interested_github_events\n    event_types = ['pull_request']\n    event_types << 'push' if run_ci\n    event_types\n  end\n\n  def scm_type\n    Settings.git_server(self.url).type\n  end\n\n  # Public: attempts to lookup a build for the commit under any of the\n  # repository's branches. This is done as an optimization since the contents\n  # of the commit are guaranteed to not have changed.\n  #\n  # Returns: Build AR object or nil\n  def build_for_commit(sha)\n    Build.joins(:branch_record).find_by(:ref => sha, 'branches.repository_id' => self.id)\n  end\n\n  # Public: looks across all of a repository's builds for one with the given\n  # SHA. 
If one does not exist it creates one on the branch given\n  #\n  # sha    - String: git sha of the commit in question\n  # branch - String or AR Branch: if an existing build is not found a new one will be created on this branch\n  #\n  # Returns: Build AR object\n  def ensure_build_exists(sha, branch)\n    build = build_for_commit(sha)\n    unless build.present?\n      build = branch.builds.create!(ref: sha, state: 'partitioning')\n    end\n    build\n  end\n\n  def name_with_namespace\n    \"#{namespace}/#{name}\"\n  end\n  alias to_param name_with_namespace\n\n  private\n\n  def validate_url_against_remote_servers\n    return unless url.present?\n\n    if RemoteServer.parseable_url?(url)\n      unless RemoteServer.valid_git_host?(url)\n        errors.add(:url, \"host is not in Kochiku's list of git servers\")\n      end\n    else\n      errors.add(:url, 'is not in a format supported by Kochiku')\n    end\n  end\nend\n"
  },
  {
    "path": "app/models/repository_observer.rb",
    "content": "class RepositoryObserver < ActiveRecord::Observer\n  observe :repository\n\n  def after_save(record)\n    record.remote_server.install_post_receive_hook!(record) if setup_hook?\n  end\n\n  def setup_hook?\n    Rails.env.production? || Rails.env.staging?\n  end\nend\n"
  },
  {
    "path": "app/uploaders/base_log_file_uploader.rb",
    "content": "class BaseLogFileUploader < CarrierWave::Uploader::Base\n  storage :file\n\n  def cache_dir\n    Rails.root.join('tmp', 'uploads')\n  end\nend\n"
  },
  {
    "path": "app/uploaders/log_file_uploader.rb",
    "content": "require 'base_log_file_uploader'\n\nclass LogFileUploader < BaseLogFileUploader\n  def store_dir\n    build_attempt_id = model.build_attempt_id\n    build_part_id = model.build_attempt.build_part_id\n    build_id = model.build_attempt.build_part.build_id\n\n    # temporary backwards compatibility for old build artifacts created before the deploy on 08/25/2015\n    if model.build_attempt.created_at < Time.parse(\"2015-08-25 04:12:46 UTC\").utc &&\n       (project_id = model.build_attempt.build_part.build_instance.project_id)\n      project_param = ActiveRecord::Base.connection.select_value(\"select name from projects where id = #{project_id}\")\n      return File.join(project_param, \"build_#{build_id}\", \"part_#{build_part_id}\", \"attempt_#{build_attempt_id}\")\n    end\n\n    repository_param = model.build_attempt.build_part.build_instance.repository.to_param\n    Rails.public_path.join(\"log_files\", repository_param, \"build_#{build_id}\", \"part_#{build_part_id}\", \"attempt_#{build_attempt_id}\")\n  end\nend\n"
  },
  {
    "path": "app/uploaders/on_success_uploader.rb",
    "content": "require 'base_log_file_uploader'\n\nclass OnSuccessUploader < BaseLogFileUploader\n  def store_dir\n    build_id = model.id\n    repository_param = model.repository.to_param\n    Rails.root.join(\"public\", \"log_files\", repository_param, \"build_#{build_id}\")\n  end\nend\n"
  },
  {
    "path": "app/views/branches/health.html.haml",
    "content": "= content_for :title, \"Health of #{@branch.name} branch of #{@repository.name_with_namespace}\"\n= content_for :favicon do\n  - if @current_build\n    = favicon_link_tag image_path(\"#{@current_build.to_color}.png\"), type: 'image/png'\n\n- content_for :header do\n  %ul.links\n    %li= link_to(\"Settings\", repository_edit_path(@repository), class: \"info\")\n    %li= link_to(\"Branches\", repository_branches_path(@repository))\n\n- cache(@branch) do\n  - rate = error_free_pass_rate(@builds)\n  - avg_rebuilds = average_number_of_rebuilds(@builds)\n  - median_seconds_to_success = median_elapsed_time(@builds)\n  .performance\n    - if median_seconds_to_success\n      %div{title: \"Median elapsed time for successful builds on #{@branch.name}\"}\n        %span.label Median time:\n        %span.number #{seconds_to_minutes(median_seconds_to_success)} minutes\n\n    - unless @builds.empty?\n      %div{title: \"Average number of build parts reattempted to get to a successful build\", class: rebuild_count_css_class(avg_rebuilds)}\n        %span.number= format(\"%0.1f\", avg_rebuilds)\n        %span.label rebuilds required on average\n\n      %div{title: \"Percentage of builds with all parts succeeding on the first try\", class: pass_rate_css_class(rate)}\n        %span.number= rate\n        %span.label pass rate on first try\n\n  .stats\n    %h2.subheader Build statistics\n    - if @first_built_date\n      %p First build created on #{@first_built_date.strftime(\"%Y-%m-%d\")}\n    %table.build-stats\n      %thead\n        %tr\n          %th\n          %th Total\n          %th Failures\n          %th Pass Rate\n      %tbody\n        %tr\n          %td All Time (#{@days_since_first_build} days)\n          %td= @total_build_count\n          %td= @total_failure_count\n          %td #{@total_pass_rate}%\n        %tr\n          %td Past 30 days\n          %td= @last30_build_count\n          %td= @last30_failure_count\n          %td #{@last30_pass_rate}%\n        
%tr\n          %td Past 7 days\n          %td= @last7_build_count\n          %td= @last7_failure_count\n          %td #{@last7_pass_rate}%\n\n- if @part_climate.count > 0\n  %h2.subheader #{@repository.name}/#{@branch.name} part failure stats over #{@builds.count} builds\n  %table.project-part-info\n    %thead\n      %tr\n        %th.right.whisker Rate\n        %th Part Info\n    - @part_climate.sort.reverse_each do |key, parts_with_failures|\n      %tr\n        %td.right #{key[0]}%\n        %td= format_paths(parts_with_failures.first)\n      %tr\n        %td\n        %td\n          - parts_with_failures.each do |part|\n            = link_to repository_build_part_path(@repository, part.build_instance, part) do\n              %span.part-status{class: [part.status, \"attempt-#{part.build_attempts.size}\"]}\n- else\n  %h2.subheader #{@repository.name}/#{@branch.name} hasn't failed in #{@builds.count} builds\n"
  },
  {
    "path": "app/views/branches/index.html.haml",
    "content": "- content_for :header do\n  %ul.links\n    %li= link_to(\"Repositories\", repositories_path)\n\n.projects\n  %ul\n    - @convergence_branches.each do |branch|\n      %li.build-info.bold\n        = link_to(branch.name, repository_branch_path(branch.repository, branch))\n    - @recently_active_branches.each do |branch|\n      %li.build-info\n        = link_to(branch.name, repository_branch_path(branch.repository, branch))\n"
  },
  {
    "path": "app/views/branches/show.html.haml",
    "content": "= content_for :title, \"#{@repository.name} : #{@branch.name}\"\n= content_for :favicon do\n  - if @current_build\n    = favicon_link_tag image_path(\"#{@current_build.to_color}.png\"), type: 'image/png'\n\n- content_for :header do\n  %ul.links\n    %li= link_to(\"Settings\", repository_edit_path(@repository), class: \"info\")\n    %li= link_to(\"Branches\", repository_branches_path(@repository))\n  = form_for @build, url: request_new_build_repository_branch_path(@repository, @branch) do |f|\n    - if @repository.enabled?\n      = f.submit \"Build\", class: 'build-button'\n    - else\n      = f.submit \"Build\", class: 'build-button', disabled: 'true'\n\n- cache(@branch) do\n  %div.health.button= link_to('Health', health_repository_branch_path(@repository, @branch, 'count' => 12))\n  - rate = error_free_pass_rate(@builds)\n  - avg_rebuilds = average_number_of_rebuilds(@builds)\n  - median_seconds_to_success = median_elapsed_time(@builds)\n  .performance\n    - if median_seconds_to_success\n      %div{title: \"Median elapsed time for successful builds on #{@branch.name}\"}\n        %span.label Median time:\n        %span.number #{seconds_to_minutes(median_seconds_to_success)} minutes\n    - else\n      %div{title: \"Median elapsed time for successful builds on #{@branch.name}\"}\n        %span.label Never built\n\n    - unless @builds.empty?\n      %div{title: \"Average number of build parts reattempted to get to a successful build\", class: rebuild_count_css_class(avg_rebuilds)}\n        %span.number= format('%0.1f', avg_rebuilds)\n        %span.label rebuilds required on average\n\n      %div{title: \"Percentage of builds with all parts succeeding on the first try\", class: pass_rate_css_class(rate)}\n        %span.number= rate\n        %span.label completion rate on first try\n\n%h2.subheader\n  = @repository.namespace + '/' + @repository.name\n  &ndash;\n  = @branch.name\n\n#plot\n\n.select_commit\n  %input{type: \"text\", id: 
\"build_ref_input\", placeholder: \"Commit Revision\"}\n  %button{onClick: \"goto_ref();\"} Lookup Commit\n\n= content_for :javascript do\n  :javascript\n    $(document).ready(Kochiku.graphBuildTimes(\"#{escape_javascript(@repository.to_param)}\", \"#{escape_javascript(@branch.name)}\"));\n    $(function() {\n      var whisker = $('th.whisker');\n      $('table')\n        .on('mouseenter', '.whisker .part-status', function() {\n          whisker.text($(this).data('ref'));\n        })\n        .on('mouseleave', '.whisker', function() {\n          whisker.text('Previous');\n        });\n    });\n\n    var goto_ref = function() {\n      window.location.href = \"//\" + location.host + \"/builds/\" + $('#build_ref_input').val();\n    };\n\n- if @current_build\n  %table.project-part-info\n    %thead\n      %tr\n        %th.right.whisker Previous\n        %th.status\n          %code.build-status{class: @current_build.state}\n            = link_to @current_build.ref[0, 5], repository_build_path(@repository, @current_build)\n        - display_ruby_version = multiple_ruby_versions?(@current_build)\n        - build_metadata_headers(@current_build, display_ruby_version).each do |header|\n          %th{class: header.downcase.gsub(/\\W+/, '-')}= header\n        %th.type Type\n        %th.right.time Elapsed\n        %th.right.count Attempt\n    - @build_parts.each do |_key, build_parts_by_build|\n      - part = build_parts_by_build.values.first\n      - build_part = build_parts_by_build[@current_build]\n      - cache(part) do\n        %tr{:id => dom_id(part)}\n          %td.right.whisker\n            - total_attempts = 0\n            - @builds[0..-2].each do |previous_build|\n              - previous_part = build_parts_by_build[previous_build]\n              - if previous_part.present?\n                - attempts = previous_part.build_attempts.size\n                - total_attempts += attempts\n                = link_to 
\"/#{@repository.to_param}/builds/#{previous_build.to_param}/parts/#{previous_part.to_param}\" do\n                  %span.part-status{class: [previous_part.status, \"attempt-#{attempts}\"], title: pluralize(attempts, 'attempt'), data: { ref: previous_build.ref[0, 5] }}\n              - else\n                %span.part-status.attempt-0\n          %td\n            - if build_part\n              %span.part-status{class: build_part.status}\n                = link_to build_part.status.to_s.capitalize, \"/#{@repository.to_param}/builds/#{@current_build.to_param}/parts/#{build_part.to_param}\"\n          - build_metadata_values(@current_build, part, display_ruby_version).each do |value|\n            %td= value\n          %td= part.kind.to_s\n          %td.right.elapsed= build_part.elapsed_time ? duration_strftime(build_part.elapsed_time) : '' if build_part\n          %td.right\n            = build_part.build_attempts.size if build_part\n\n- if @current_build\n  = content_for :javascript do\n    :javascript\n      (function() {\n        var startTimes = #{\n          start_times = {}\n          @current_build.build_parts.each { |part| start_times[dom_id(part)] = part.started_at if part.running? }\n          start_times.to_json\n        };\n        var now = new Date();\n        for(var partDomId in startTimes) {\n          if(!startTimes.hasOwnProperty(partDomId)) {\n            continue;\n          }\n          var startTime = new Date(Date.parse(startTimes[partDomId]));\n          $('.project-part-info tbody').find('tr#' + partDomId + ' > .elapsed').text(\n            Math.round((now-startTime)/60000) + \":\" + (\"00\" + (Math.round((now-startTime)/1000)%60)).slice(-2));\n        }\n      })()\n"
  },
  {
    "path": "app/views/branches/show.json.erb",
    "content": "<%\n  json_data = @branch.attributes\n  json_data['recent_builds'] = @builds\n%>\n<%= json_data.to_json.html_safe %>\n"
  },
  {
    "path": "app/views/branches/show.rss.builder",
    "content": "xml.rss({:version => \"2.0\"}) do\n  xml.channel do\n    xml.title(\"Kochiku RSS Feed\")\n    xml.link(repository_branch_url(@repository, @branch))\n    xml.language(\"en\")\n    xml.ttl(10)\n\n    @builds.each do |build|\n      xml.item do\n        xml.title(\"Build Number #{build.id} #{build_success_in_words(build)}\")\n        xml.pubDate(build.created_at.to_s)\n        xml.guid(repository_build_url(@repository, build))\n        xml.link(repository_build_url(@repository, build))\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "app/views/branches/status_report.xml.builder",
    "content": "xml.Projects do\n  @branches.each do |branch|\n    # currently cimonitor only utilizes the activity attribute\n    xml.Project({\n      :name => branch.repository.to_param + (branch.name == 'master' ? '' : ('/' + branch.name)),\n      :activity => build_activity(branch.builds.last),\n      :lastBuildLabel => branch.builds.last.object_id,\n      :webUrl => repository_branch_url(branch.repository, branch),\n      :lastBuildStatus => (branch.last_completed_build.try(:succeeded?) ? \"Success\" : \"Failure\"),\n      :lastBuildTime => branch.last_completed_build.try(:finished_at).try(:strftime, \"%Y-%m-%dT%H:%M:%SZ\")\n    })\n  end\nend\n"
  },
  {
    "path": "app/views/build_attempts/_build_attempt.html.haml",
    "content": ".attempt{:class => build_attempt.state}\n  = build_attempt.state\n"
  },
  {
    "path": "app/views/build_attempts/stream_logs.html.haml",
    "content": "%h2.subheader\n  = @repository.name_with_namespace\n  &ndash;\n  = link_to @build.branch_record.name, repository_branch_path(@repository, @build.branch_record)\n  &ndash;\n  = link_to repository_build_path(@repository, @build) do\n    %code.build-status{class: @build.state, title: @build.ref}\n      = @build.ref[0, 7]\n  &ndash; Part #{@build_part.id}\n.flash.error{'id' => 'errorMessage', 'style' => \"display: none;\"}\n  Error streaming logs\n%label\n  = check_box_tag :refresh, true, true\n  Refresh\n%div{'id' => 'loadFull', 'style' => 'display: none;'}\n  (skipping n bytes)\n\n%br\n\n.log_contents{'id' => 'log_content_display'}\n%img{:src => image_url('loader.gif'), 'id' => \"loading_img\"}\n\n= content_for :javascript do\n  :javascript\n    var currentPos = -1;\n    var finished = false;\n    var refreshIntervalId;\n    var refreshInterval = 5000;\n    var badRequests = 0;\n    var startPos = -1;\n\n    // autoscroll to the bottom to follow tail of log\n    var scrolled_to_bottom = function() {\n      return ((window.innerHeight + window.scrollY) >= document.body.scrollHeight);\n    }\n\n    var get_logs_chunk = function() {\n      if($('input#refresh').is(':checked') && !finished) {\n        $.getJSON(\"#{stream_logs_chunk_path(@build_attempt.id)}?start=\" + currentPos, function( data ) {\n          log_entry = data['Contents'];\n          current_value = $('#log_content_display').text();\n          var scrolled = scrolled_to_bottom();\n          $('#log_content_display').text(current_value + log_entry);\n          if (scrolled) {\n            window.scrollTo(0,document.body.scrollHeight);\n          }\n\n          // first request: use it to determine whether any bytes have been skipped\n          if (currentPos == -1) {\n            startPos = data['Start'];\n            if (data['Start'] > 0) {\n              $('#loadFull').text(\"(skipping \" + startPos + \" bytes)\");\n              $('#loadFull').show();\n            }\n          }\n       
   currentPos = data['Start'] + data['BytesRead'];\n\n          if (data['state'] != 'running' && data['BytesRead'] == 0) {\n            finished = true;\n            $('#loading_img').hide();\n          }\n        })\n          .fail(function() {\n            clearInterval(refreshIntervalId);\n            badRequests += 1;\n            if (badRequests < 3) {\n              refreshInterval *= 1.25;\n              refreshIntervalId = window.setInterval(get_logs_chunk, refreshInterval);\n            }\n            else {\n              $('#errorMessage').show();\n              $('#loading_img').hide();\n            }\n          });\n      }\n    };\n\n    $('input#refresh').click(function () {\n      if ($('input#refresh').is(':checked')) {\n        if (!finished) {\n          $('#loading_img').show();\n        }\n      } else {\n        $('#loading_img').hide();\n      }\n    });\n\n    $(document).ready(function() {\n      window.scrollTo(0,document.body.scrollHeight);\n      get_logs_chunk();\n      refreshIntervalId = window.setInterval(get_logs_chunk, refreshInterval);\n    });\n"
  },
  {
    "path": "app/views/build_mailer/build_break_email.html.haml",
    "content": "%html{xmlns: \"http://www.w3.org/1999/html\"}\n  %head\n    %meta{'content' => 'text/html; charset=UTF-8', 'http-equiv' => 'Content-Type'}\n  %body\n    %h1 #{@build.branch_record.name} build failed for #{@build.repository.name}\n    = link_to('Link to build.', repository_build_url(@build.repository, @build))\n    - if @pr_link\n      = link_to('Link to PR.', @pr_link)\n    - if @build.build_parts.count > 1\n      The build was sharded into #{@build.build_parts.count} parts and took #{@build.elapsed_time.to_i / 60} minutes.\n      (#{@build.idle_time.to_i / 60}m idle, #{@build.longest_build_part.to_i / 60}m running.)\n      %br\n      Without sharding the build would have taken #{@build.linear_time.to_i / 60} minutes to run.\n      %br\n    - else\n      The build took #{@build.elapsed_time.to_i / 60} minutes.\n      (#{@build.idle_time.to_i / 60}m idle, #{@build.longest_build_part.to_i / 60}m running.)\n      %br\n    %h2 Failed build parts:\n    %ul\n      - @failed_build_parts.each do |failed_build_part|\n        %li\n          = link_to(\"Part: #{failed_build_part.kind}, number #{failed_build_part.id}\", repository_build_part_url(@build.repository, @build, failed_build_part))\n          = failed_build_part_sentence(failed_build_part)\n        %span.broken-path{style: \"font-size: smaller; color: gray;\"} - #{failed_build_paths(failed_build_part)}\n    %br\n    - @responsible_email_and_files.each do |email, files|\n      #{email} was emailed because of changes to:\n      %ul\n        - files.each do |file|\n          %li= file\n      %br\n\n    %h2 Changes #{@build.branch_record.convergence? ? 
'since last success' : 'included in build'}\n    - @git_changes.each do |git_change|\n      %b SHA: #{link_to(git_change[:hash], @build.repository.remote_server.href_for_commit(git_change[:hash]))}\n      %br\n      %b Committer:\n      = git_change[:author]\n      %br\n      %b Date:\n      = git_change[:date]\n      %br\n      %pre= git_change[:message]\n      %br\n      %br\n"
  },
  {
    "path": "app/views/build_mailer/build_break_email.text.erb",
    "content": "<%= @build.branch_record.name %> build failed for <%= @build.repository.name %>\n<%= repository_build_url(@build.repository, @build) %>\n\n<% if @build.build_parts.count > 1 %>\nThe build was sharded into <%= @build.build_parts.count %> parts and took <%= @build.elapsed_time.to_i/60 %> minutes. (<%= @build.idle_time.to_i/60 %>m idle, <%= @build.longest_build_part.to_i/60 %>m running.)\nWithout sharding the build would have taken <%= @build.linear_time.to_i/60 %> minutes to run.\n<% else %>\nThe build took <%= @build.elapsed_time.to_i/60 %> minutes. (<%= @build.idle_time.to_i/60 %>m idle, <%= @build.longest_build_part.to_i/60 %>m running.)\n<% end %>\n\n<% @responsible_email_and_files.each do |email, files| %>\n<%= email %> was emailed because of changes to <%= files.join(\", \") %>\n<% end %>\n\n--------------------------------------------------------------------------------\n<%= \"Changes #{@build.branch_record.convergence? ? 'since last success' : 'included in build'}\" %>\n--------------------------------------------------------------------------------\n\n<% @git_changes.each do |git_change| %>\n  SHA: <%= git_change[:hash] %>\n  Committer: <%= git_change[:author] %>\n  Date: <%=  git_change[:date] %>\n\n      <%= git_change[:message] %>\n\n\n<% end %>\n"
  },
  {
    "path": "app/views/build_mailer/build_success_email.html.haml",
    "content": "%html{xmlns: \"http://www.w3.org/1999/html\"}\n  %head\n    %meta{'content' => 'text/html; charset=UTF-8', 'http-equiv' => 'Content-Type'}\n  %body\n    %h1 #{@build.branch_record.name} build succeeded for #{@build.repository.name}\n    = link_to('Link to build.', repository_build_url(@build.repository, @build))\n    - if @pr_link\n      = link_to('Link to PR.', @pr_link)\n    - if @build.build_parts.count > 1\n      The build was sharded into #{@build.build_parts.count} parts and took #{@build.elapsed_time.to_i / 60} minutes.\n      (#{@build.idle_time.to_i / 60}m idle, #{@build.longest_build_part.to_i / 60}m running#{@build.retry_count > 0 ? \", requiring #{@build.retry_count} retries.)\" : \".)\"}\n      %br\n      Without sharding the build would have taken #{(@build.linear_time.to_i / 60) * (1 + @build.max_retries)} minutes to run.\n      %br\n      - if @build.retry_count > 0\n        (#{@build.linear_time.to_i / 60}m building, requiring #{@build.max_retries} retries.)\n    - else\n      The build took #{@build.elapsed_time.to_i / 60} minutes.\n      (#{@build.idle_time.to_i / 60}m idle, #{@build.longest_build_part.to_i / 60}m running#{@build.retry_count > 0 ? \", requiring #{@build.retry_count} retries.)\" : \".)\"}\n      %br\n    %h2 Changes included in build\n    - @git_changes.each do |git_change|\n      %b SHA: #{link_to(git_change[:hash], @build.repository.remote_server.href_for_commit(git_change[:hash]))}\n      %br\n      %b Committer:\n      = git_change[:author]\n      %b Date:\n      = git_change[:date]\n      %pre= git_change[:message]\n      %br\n      %br\n"
  },
  {
    "path": "app/views/build_mailer/build_success_email.text.erb",
    "content": "<%= @build.branch_record.name %> build succeeded for <%= @build.repository.name %>\n<%= repository_build_url(@build.repository, @build) %>\n<% if @build.build_parts.count > 1 %>\n  The build was sharded into <%= @build.build_parts.count %> parts and took <%= @build.elapsed_time.to_i/60 %> minutes.\n  (<%= @build.idle_time.to_i/60 %>m idle, <%= @build.longest_build_part.to_i/60 %>m\n  <% if @build.retry_count > 0 %>\n    running, requiring <%= @build.retry_count %> retries.)\n  <% else %>\n    running.)\n  <% end %>\n  Without sharding the build would have taken <%= (@build.linear_time.to_i/60) * (1 + @build.max_retries) %> minutes to run.<br>\n  <% if @build.retry_count > 0 %>\n    (<%= @build.linear_time.to_i/60 %>m building, requiring <%= @build.max_retries %> retries.)\n  <% end %>\n<% else %>\n  The build took <%= @build.elapsed_time.to_i/60 %> minutes.\n  (<%= @build.idle_time.to_i/60 %>m idle, <%= @build.longest_build_part.to_i/60 %>m\n  <% if @build.retry_count > 0 %>\n    running, requiring <%= @build.retry_count %> retries.)\n  <% else %>\n    running.)\n  <% end %>\n<% end %>\n\n--------------------------------------------------------------------------------\nChanges included in build\n--------------------------------------------------------------------------------\n\n<% @git_changes.each do |git_change| %>\n  SHA: <%= git_change[:hash] %>\n  Committer: <%= git_change[:author] %>\n  Date: <%=  git_change[:date] %>\n\n      <%= git_change[:message] %>\n\n\n<% end %>\n"
  },
  {
    "path": "app/views/build_mailer/error_email.html.haml",
    "content": "%html{xmlns: \"http://www.w3.org/1999/html\"}\n  %head\n    %meta{'content' => 'text/html; charset=UTF-8', 'http-equiv' => 'Content-Type'}\n  %body\n    %h2 Kochiku error on #{@builder}\n    = repository_build_part_url(@build_part.build_instance.repository, @build_part.build_instance, @build_part)\n\n    %h2 Error text\n    %pre= @error_text\n"
  },
  {
    "path": "app/views/build_mailer/error_email.text.erb",
    "content": "Kochiku error on <%= @builder %> for <%= repository_build_part_url(@build_part.build_instance.repository, @build_part.build_instance, @build_part) %>\n\n<%= @error_text %>\n"
  },
  {
    "path": "app/views/build_parts/_build_attempts.html.haml",
    "content": "%tr{id: dom_id(attempt), :\"data-id\" => index + 1}\n  %td.right= link_to(index + 1, attempt)\n  %td\n    %span.attempt-status{:class => attempt.state}= attempt.state.to_s.capitalize\n  %td.rank= build_attempts_rank[attempt.id.to_s]\n  %td= attempt.started_at\n  %td.right.elapsed= duration_strftime(attempt.elapsed_time)\n  %td.right= attempt.builder ? attempt.builder.sub(\".#{Settings.domain_name}\", '') : \"pending\"\n  %td\n    - if attempt.running? && attempt.log_streamer_port.present?\n      = link_to(\"stdout.log (in progress)\", stream_logs_path(attempt.id))\n    - else\n      - attempt.build_artifacts.sort_by { |artifact| artifact.log_file.path }.each do |artifact|\n        = link_to File.basename(artifact.log_file.path), artifact\n        %br\n  %td.wrap\n    - unless attempt.stopped?\n      = link_to(\"Abandon\", finish_build_attempt_path(attempt, :state => 'aborted'), :method => :post)\n"
  },
  {
    "path": "app/views/build_parts/_build_part.html.haml",
    "content": ".part{:class => build_part.status, :title => build_part.paths.map{|path| \"-#{path}\"}.join(\"<br>\")}\n  -# build_part.build_instance.repository is important to prevent n+1 queries here. Using the :through does a bunch of SQL.\n  %a.part-wrapper{:href => repository_build_part_path(build_part.build_instance.repository, build_part.build_instance, build_part)}\n    .kind\n      - case build_part.kind\n      - when \"spec\"\n        Specs\n      - when \"cucumber\"\n        Cukes\n      - else\n        = build_part.kind\n    - if build_part.build_attempts.any?\n      .attempts\n        = render build_part.last_attempt\n        - if build_part.build_attempts.size > 1\n          = \"...\"\n"
  },
  {
    "path": "app/views/build_parts/show.html.haml",
    "content": "= content_for :title do\n  = @build.ref[0, 7]\n  &ndash;\n  = @repository.name\n= content_for :favicon do\n  = favicon_link_tag image_path(\"#{@build_part.to_color}.png\"), :type => 'image/png'\n\n%h2.subheader\n  = link_to(@build.repository.name_with_namespace, repository_branches_path(@build.repository))\n  &ndash;\n  = link_to(@build.branch_record.name, repository_branch_path(@build.repository, @build.branch_record))\n  &ndash;\n  = link_to repository_build_path(@repository, @build) do\n    %code.build-status{class: @build.state, title: @build.ref}\n      = @build.ref[0, 7]\n  &ndash; #{@build_part.kind} (part #{@build_part.id})\n\n  .actions\n    %label\n      - if @repository.enabled?\n        = link_to(\"Rebuild\", rebuild_repository_build_part_path(@repository, @build, @build_part), method: :post, class: \"rebuild button\")\n    %label\n      = check_box_tag :refresh, true, @build_part.not_finished?\n      Refresh\n\n.build-info.build-info-subheader\n  %span.info\n    %span.status{:class => 'build-part-' + @build_part.status.to_s}= @build_part.status.to_s.capitalize\n    on\n    %span.queue #{@build_part.queue} queue\n\n%table.build-part-info\n  %thead\n    %tr\n      %th.right.count Attempt\n      %th.status Status\n      %th.queue-position Position\n      %th Started At\n      %th.right.time Elapsed Time\n      %th.right.worker Worker\n      %th Build Artifacts\n      %th.right.actions Actions\n  %tbody\n    - @build_part.build_attempts.each_with_index do |attempt, index|\n      = render partial: 'build_parts/build_attempts', locals: {attempt: attempt, index: index, build_attempts_rank: @build_attempts_rank}\n\n%ol#build-paths\n  - if @build_part.options['total_workers'] && @build_part.options['worker_chunk']\n    %li Chunk #{@build_part.options['worker_chunk']} of #{@build_part.options['total_workers']}\n  - @build_part.paths.each do |path|\n    %li= path\n\n= content_for :javascript do\n  :javascript\n    if ($('.build-part-info tbody 
tr').length > 0) {\n      $('.build-part-info').tablesorter({ sortList: [ [0, 0] ] });\n      StartTimes = #{\n        # rubocop:disable Style/IndentationConsistency\n        start_times = {}\n        @build_part.build_attempts.each_with_index { |attempt, index| start_times[index + 1] = attempt.started_at }\n        start_times.to_json\n      };\n    }\n\n    if ( \"Notification\" in window && Notification.permission == \"default\") {\n      Notification.requestPermission();\n    }\n\n    Kochiku.buildInfo = {table: '.build-part-info tbody', renderTime: Date.parse(\"#{raw @build_part.updated_at}\"), state: \"#{@build.state}\"};\n\n    Kochiku.buildInfo.id = #{@build.id};\n    Kochiku.buildInfo.branch = \"#{@build.branch_record.name}\";\n    Kochiku.buildInfo.repo = \"#{@build.repository.name}\";\n\n    Kochiku.terminalStates = #{raw BuildAttempt::COMPLETED_BUILD_STATES};\n    Kochiku.doneMessage = \"BuildPart on \"\n\n    Kochiku.delayedRefresh(Kochiku.buildInfo);\n"
  },
  {
    "path": "app/views/builds/_build.html.haml",
    "content": ".build\n  %a.build-wrapper{:href => repository_build_path(build.repository, build)}\n    .build-info\n      .ref= build.ref\n    %h3.build-id{:class => build.state}= build.id\n    .times\n      .time-started\n        Started at\n        = time_for(build.created_at, \"%m/%d %I:%M%P\")\n      .time-elapsed\n        - if build.completed?\n          Built in\n          %strong\n            = duration_strftime(build.elapsed_time, \"%Hh %Mm %Ss\")\n\n  .build-state\n    %span.info\n      %span.state{:class => 'build-' + build.state.to_s}= build.state.to_s.capitalize\n      on\n      %span.queue= build.queue.to_s.capitalize\n    - if build.branch_record.convergence?\n      %a.info{:href => show_link_to_compare(build, build.previous_successful_build.try(:ref), build.ref), :title => 'show changes since last green build'}\n        Compare to last green build\n    - elsif build.succeeded?\n      %a.info{:href => show_link_to_create_pull_request(build), :title => 'create a pull request against master'}\n        Create pull request\n    - if build.on_success_script_log_file.present?\n      = link_to File.basename(build.on_success_script_log_file.to_s), build.on_success_script_log_file.url, :class => :info\n    %a.info.last{:href => show_link_to_commit(build.repository, build.ref)}\n      Show HEAD commit\n\n  .parts= render build.build_parts\n"
  },
  {
    "path": "app/views/builds/_build_parts.html.haml",
    "content": "- display_ruby_version = multiple_ruby_versions?(build)\n%tr{:\"data-id\" => part.id}\n  %td.right= link_to(part.id, repository_build_part_path(build.repository, build, part))\n  %td\n    %span.part-status{:class => part.status}\n      - text = part.status.to_s.capitalize\n      - if part.status == 'running' && part.last_attempt.log_streamer_port.present?\n        = link_to(text, stream_logs_path(part.last_attempt.id))\n      - elsif (artifact = part.most_recent_stdout_artifact)\n        = link_to(text, artifact, :title => 'Last completed stdout.log')\n      - else\n        = text\n  - position = build_parts_position[part.id]\n  - if position.present?\n    %td{:class => 'queue-position-value'}\n      = position\n  - else\n    %td\n  - build_metadata_values(build, part, display_ruby_version).each do |value|\n    %td= value\n  %td= part.kind.to_s\n  %td.right\n    - builder = part.last_attempt.try(:builder)\n    = builder ? builder.sub(\".#{Settings.domain_name}\", '') : \"pending\"\n  %td.right.elapsed= part.elapsed_time ? duration_strftime(part.elapsed_time) : \"pending\"\n  %td.right= part.build_attempts.size\n  %td.right\n    - if part.unsuccessful? && repository.enabled?\n      = link_to(\"Rebuild\", rebuild_repository_build_part_path(build.repository, build, part), :method => :post)\n"
  },
  {
    "path": "app/views/builds/show.html.haml",
    "content": "= content_for :title do\n  = @build.ref[0, 7]\n  &ndash;\n  = @build.repository.name\n= content_for :favicon do\n  = favicon_link_tag image_path(\"#{@build.to_color}.png\"), :type => 'image/png'\n\n%h2.subheader\n  = @build.repository.name\n  &ndash;\n  = link_to(@build.branch_record.name, repository_branch_path(@build.repository, @build.branch_record))\n  &ndash;\n  %code.build-status{class: @build.state, title: @build.ref}\n    = @build.ref[0, 7]\n  %a.info{:href => show_link_to_commit(@build.repository, @build.ref)}\n    Show\n  - if @build.branch_record.convergence?\n    %a.info{:href => show_link_to_compare(@build, @build.previous_successful_build.try(:ref), @build.ref), :title => 'show changes since last green build'}\n      Compare to last green build\n\n  .actions\n    - if @build.succeeded?\n      = button_to \"Sync status to #{@build.repository.scm_type}\", resend_status_repository_build_path(@build.repository, @build), :method => :post\n    - if @build.repository.allows_kochiku_merges?\n      %form{action: toggle_merge_on_success_repository_build_path(@build.repository, @build), method: :post}\n        %label\n          = check_box_tag :merge_on_success, true, @build.merge_on_success_enabled?, disabled: !eligible_for_merge_on_success?(@build), onchange: 'this.form.submit()'\n          Merge on Success\n\n    %label\n      = check_box_tag :refresh, true, @build.is_running?\n      Refresh\n\n.build-info.build-info-subheader\n  - if @build.succeeded? && !@build.branch_record.convergence?\n    %a.info{:href => show_link_to_create_pull_request(@build), :title => 'create a pull request against master'}\n      Create pull request\n  %span.info\n    Created\n    = timeago(@build.created_at)\n  %span.info\n    Updated\n    = timeago(@build.updated_at, :id => \"time-since-update\")\n  - if @build.completed?\n    %span.info\n      Built in #{duration_strftime(@build.elapsed_time, \"%Hh %Mm %Ss\")}\n  - if @build.completed? && @build.failed? 
&& @build.build_parts.present?\n    %span.info\n      #{@build.build_parts.failed.count} out of #{@build.build_parts.count} build parts failed\n  - if @build.is_running?\n    %span.info\n      = button_to \"Abort Build\", abort_repository_build_path(@build.repository, @build), method: :patch, class: \"abort-build\"\n  - if @repository.enabled? && (@build.failed? || @build.aborted?)\n    %span.info\n      - if @build.build_parts.empty? || !@build.error_details.empty?\n        = button_to \"Retry Partitioning\", retry_partitioning_repository_build_path(@build.repository, @build), :method => :post, :form_class => \"retry-partitioning\"\n      - else\n        = button_to \"Rebuild failed parts\", rebuild_failed_parts_repository_build_path(@build.repository, @build), :method => :post, :form_class => \"rebuild-parts\"\n\n- if @build.error_details.present?\n  .build-error\n    %h2 Build error\n    %pre= [@build.error_details[:message], @build.error_details[:backtrace]].join(\"\\n\")\n- if @build.succeeded? 
&& @build.build_parts.count == 0\n  .build-empty\n    %h2 Build Empty\n    %span.info\n      Partitioner did not return any work for this build.\n%table.build-summary#build-summary\n  %thead\n    %tr\n      %th.right.id Part\n      %th.status Status\n      %th.queue-position Position\n      - display_ruby_version = multiple_ruby_versions?(@build)\n      - build_metadata_headers(@build, display_ruby_version).each do |header|\n        %th{class: header.downcase.gsub(/\\W+/, '-')}= header\n      %th.type Type\n      %th.right.worker Worker\n      %th.right.time Elapsed Time\n      %th.right.count Attempt\n      %th.right.actions Actions\n  %tbody\n    - @build.build_parts.decorate.each do |part|\n      = render partial: 'builds/build_parts', locals: {part: part, build: @build, build_parts_position: @build_parts_position, repository: @repository}\n\n- if @build.on_success_script_log_file.url\n  = link_to(File.basename(@build.on_success_script_log_file.path), @build.on_success_script_log_file.url)\n= content_for :javascript do\n  :javascript\n    if ($('#build-summary tbody tr').length > 0) {\n      $('#build-summary').tablesorter({ sortList: [ [1, 0] ] });\n      StartTimes = #{\n        # rubocop:disable Style/IndentationConsistency\n        start_times = {}\n        @build.build_parts.each { |part| start_times[part.id] = part.started_at }\n        start_times.to_json\n      };\n    }\n\n    if ( \"Notification\" in window && Notification.permission == \"default\") {\n      Notification.requestPermission();\n    }\n\n    Kochiku.buildInfo = {table: '.build-summary tbody', renderTime: Date.parse(#{raw @build.updated_at.to_json}), state: \"#{@build.state}\"};\n    Kochiku.buildInfo.id = #{@build.id};\n    Kochiku.buildInfo.branch = \"#{@build.branch_record.name}\";\n    Kochiku.buildInfo.repo = \"#{@build.repository.name}\";\n\n    Kochiku.terminalStates = #{raw Build::TERMINAL_STATES};\n    Kochiku.delayedRefresh(Kochiku.buildInfo);\n    Kochiku.doneMessage = \"Build 
on \"\n\n    $('abbr.timeago').click(function() {\n      // swap the relative time with the absolute time\n      var originalText = $(this).text();\n      $(this).text($(this).attr('title'));\n      $(this).attr('title', originalText);\n    });\n"
  },
  {
    "path": "app/views/dashboards/build_history_by_worker.html.haml",
    "content": "#worker-health-wrap\n  %table{:class => \"worker-health\"}\n    %thead\n      %th.right Worker\n      %th Partition Attempts\n    %tbody\n      %tr\n        %td.right= \"Partition workers\"\n        %td\n          - @partition_jobs.each do |partition_job|\n            = link_to build_redirect_path(partition_job) do\n              - if !partition_job.build_parts.empty?\n                %span.attempt-status{:class => :passed}\n                // ugly hack because in this table, partitioning means \"in progress\", whereas in\n                // the following build attempts table, it means not started yet\n              - elsif partition_job.state == 'partitioning'\n                %span.attempt-status{:class => :running}\n              - else\n                %span.attempt-status{:class => partition_job.state}\n  %br\n  %table{:class => \"worker-health\"}\n    %thead\n      %th.right Worker\n      %th Build Attempts\n    %tbody\n      - @workers.each do |worker_name, build_attempts|\n        %tr\n          %td.right= worker_name\n          %td\n            - build_attempts.each do |build_attempt|\n              = link_to build_attempt do\n                %span.attempt-status{:class => build_attempt.state}\n"
  },
  {
    "path": "app/views/layouts/application.html.haml",
    "content": "!!!\n%html{:lang => 'en'}\n  %head\n    %meta{:charset => 'utf-8'}\n    %meta{:name => 'google', :value => 'notranslate'}\n    %title\n      - if (title = yield(:title)).present?\n        #{title} &ndash;\n      Kochiku\n    = stylesheet_link_tag 'tablesorter.theme.kochiku.css'\n    = stylesheet_link_tag 'tipTip.css', :media => 'screen'\n    = stylesheet_link_tag 'screen.css', :media => 'all'\n    = csrf_meta_tag\n    - if (favicon = yield(:favicon)).present?\n      = favicon\n    - else\n      = favicon_link_tag '/favicon.ico'\n  %body\n    #page\n      .section-wrapper#header\n        .section\n          %a.logo{:href => root_path, :title => \"Home\"}\n            %h1\n              %b Kochiku\n              %ruby.translation\n                構 <rt>こう</rt>\n                築 <rt>ちく</rt>\n          .header-right\n            = yield :header\n      - if @repository && !@repository.enabled?\n        .section-wrapper#disabled-repo-alert\n          .section\n            .warn This repository is currently disabled.\n      .section-wrapper#content\n        .section\n          - if flash[:error].present?\n            .flash.error\n              = flash[:error]\n          - if flash[:warn].present?\n            .flash.warn\n              = flash[:warn]\n          - if flash[:message].present?\n            .flash.message\n              = flash[:message]\n          = yield\n\n      #nav\n        .section= link_to(\"Build taking too long? (Resque Admin)\", \"/resque\")\n        .section= link_to(\"Worker Health\", build_history_by_worker_path(count: 5000))\n\n  = javascript_include_tag 'application'\n  :javascript\n    $(document).ready(function() {\n      $('.part').tipTip({\n        delay: 100,\n        maxWidth: \"auto\",\n        edgeOffset: 10,\n        fadeIn: 100,\n        fadeOut: 100\n      });\n    });\n  = yield :javascript\n"
  },
  {
    "path": "app/views/merge_mailer/merge_failed.text.erb",
    "content": "Kochiku failed to automatically merge build: <%= [@build.repository.to_param, @build.id, @build.branch_record.name].join(\", \") %>.\n\nstdout & stderr:\n\n<%= @stdout_and_stderr %>\n"
  },
  {
    "path": "app/views/merge_mailer/merge_successful.html.erb",
    "content": "<p>\n  Kochiku automatically merged build:\n  <%= @build.repository.to_param %>, <%= @build.id %>, <%= link_to_branch @build %>.\n</p>\n\n<p>\n  The ref for the merge is <%= link_to_commit(@build.repository, @merge_commit) %>.\n</p>\n\nstdout &amp; stderr:\n\n<pre>\n<%= @stdout_and_stderr %>\n</pre>\n"
  },
  {
    "path": "app/views/repositories/_form.html.haml",
    "content": "= form_for @repository, :url => form_url, :html => { :id => 'repository-form' } do |f|\n  = f.error_messages\n  %div\n    %label{:for => \"url\"} Repository URL:\n    = f.text_field :url, :placeholder => '', :autocapitalize => 'off', :autocorrect => 'off', :spellcheck => 'false'\n  %div\n    %label{:for => \"convergence_branches\"} Convergence Branches:\n    = text_field_tag :convergence_branches, @current_convergence_branches.join(', '), placeholder: \"master\", autocapitalize: 'off', autocorrect: 'off', spellcheck: 'false'\n  - if @repository.test_command.present?\n    %div{title: 'Test command should now be specified in the kochiku.yml'}\n      %label{:for => \"test_command\"} Test Command: (Deprecated)\n      = f.text_field :test_command, :id => 'test_command', :disabled => true\n  %div\n    %label{:for => \"timeout\"} Timeout a build part after:\n    = f.text_field :timeout, :id => \"timeout\", :class => \"short\"\n    minutes\n  %div\n    %label{:for => \"assume_lost_after\"} Assume that a build has been lost if its still running after:\n    = f.text_field :assume_lost_after, :id => \"assume_lost_after\", :class => \"short\"\n    minutes\n  %div\n    %label{:for => \"run_ci\"} Trigger build on push to master:\n    = f.check_box :run_ci, :id => \"run_ci\"\n  %div\n    %label{:for => \"enabled\"} Enable repository:\n    = f.check_box :enabled, :id => \"enabled\"\n\n\n  %fieldset\n    %legend Pull Requests\n    %div\n      %label{:for => \"build_pull_requests\"} Build pull requests:\n      = f.check_box :build_pull_requests, :id => \"build_pull_requests\"\n    %div\n      %label{:for => \"allows_kochiku_merges\"} Allow developers to request branches be merged into master on success:\n      = f.check_box :allows_kochiku_merges, :id => \"allows_kochiku_merges\"\n\n      - display_css = @repository.allows_kochiku_merges ? 
'' : 'display: none'\n      %span{:id => 'branch-delete-warning', :style => display_css} Warning: Kochiku will delete the branch from Git after merging.\n    %div\n      %label{:for => \"send_merge_successful_email\"} Send email on automatic merge success:\n      = f.check_box :send_merge_successful_email, :id => \"send_merge_successful_email\", :disabled => @repository.allows_kochiku_merges ? nil : true\n\n  %fieldset\n    %legend On a green build\n    %div\n      %label{:for => \"on_green_update\"} Update branches to last green commit:\n      = f.text_field :on_green_update, :id => \"on_green_update\", :placeholder => \"Comma separated list of branch names\"\n    %div\n      %label{:for => \"send_build_success_email\"} Send email to contributers:\n      = f.check_box :send_build_success_email, :id => \"send_build_success_email\"\n\n  %fieldset\n    %legend On a red build\n    %div\n      %label{:for => \"send_build_failure_email\"} Send email to build breakers:\n      = f.check_box :send_build_failure_email, :id => \"send_build_failure_email\"\n    %div\n      %label{:for => \"email_on_first_failure\"} Email on first build part failure for branch builds:\n      = f.check_box :email_on_first_failure, :id => \"email_on_first_failure\"\n\n  = f.submit @repository.new_record? ? 
\"Create\" : \"Update\"\n\n- unless @repository.new_record?\n  = button_to \"Delete\", repository_path(@repository.id), method: :delete, form_class: \"delete-form\", class: \"danger-button\", data: {confirm: \"This is a permanent destructive action, are you sure?\"}\n\n= content_for :javascript do\n  :javascript\n    $('#allows_kochiku_merges').change(function(){\n      $('#branch-delete-warning').toggle($(\"#allows_kochiku_merges\").is(':checked'));\n      $('#send_merge_successful_email').prop(\"disabled\", !$(\"#allows_kochiku_merges\").is(':checked'));\n    });\n    $('#repository-form').submit(function(event) {\n      // Manually update the hidden element generated by the Rails check_box helper.\n      //\n      // This workaround is necessary for send_merge_successful_email to\n      // maintain its value when its input is 'disabled'.\n      $(\"input[name='repository[send_merge_successful_email]'][type='hidden']\").val(\n        $(\"input[name='repository[send_merge_successful_email]'][type='checkbox']\").is(':checked')\n      );\n    });\n"
  },
  {
    "path": "app/views/repositories/dashboard.html.haml",
    "content": "- content_for :header do\n  %ul.links\n    %li= link_to(\"Repositories\", repositories_path)\n\n.projects.projects-grid\n  - @branches.each do |branch|\n    - cache(branch) do\n      %div.ci-build-info{:class => \"ci-#{branch.most_recent_build_state}\"}\n        %div.project-name\n          = link_to(branch.repository.name, repository_branch_path(branch.repository, branch))\n        - if branch.most_recent_build.try(:is_running?)\n          %div.state{:class => \"build-#{branch.most_recent_build_state}\"}\n            = branch.most_recent_build_state.to_s.capitalize\n        - if branch.last_completed_build && branch.last_completed_build.finished_at\n          %div.state\n            Last built\n            = timeago(branch.last_completed_build.finished_at)\n        - else\n          %div.state Never built\n        - if branch.last_completed_build\n          %div.state{:class => \"build-#{branch.last_completed_build.state}\"}\n            = branch.last_completed_build.state.to_s.capitalize\n            = \"in \" + distance_of_time_in_words(branch.last_build_duration) if branch.last_build_duration\n        %div.project-link\n          = link_to(\"all branches\", repository_branches_path(branch.repository))\n"
  },
  {
    "path": "app/views/repositories/edit.html.haml",
    "content": "= render 'form', form_url: repository_url(@repository.id)\n\n%br\n%h3 Web Hooks\n%p\n  %strong Build SHA\n  %br\n  = link_to build_ref_repository_url(@repository.id, ref: 'master', sha: 'abc123')\n  %br\n  %em.hint\n    POST to this URL from your source control system to trigger a build\n    of the given branch/SHA.\n"
  },
  {
    "path": "app/views/repositories/index.html.haml",
    "content": ".repositories\n  .new-repository-link\n    = link_to(\"Add Repository\", new_repository_path)\n\n  %ul\n    - @repositories.each do |repository|\n      %li.build-info\n        = link_to(repository.url, repository_edit_path(repository))\n"
  },
  {
    "path": "app/views/repositories/new.html.haml",
    "content": "= render 'form', form_url: repositories_url\n"
  },
  {
    "path": "bin/bundle",
    "content": "#!/usr/bin/env ruby\nENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__)\nload Gem.bin_path('bundler', 'bundle')\n"
  },
  {
    "path": "bin/rails",
    "content": "#!/usr/bin/env ruby\nbegin\n  load File.expand_path('../spring', __FILE__)\nrescue LoadError => e\n  raise unless e.message.include?('spring')\nend\nAPP_PATH = File.expand_path('../../config/application', __FILE__)\nrequire_relative '../config/boot'\nrequire 'rails/commands'\n"
  },
  {
    "path": "bin/rake",
    "content": "#!/usr/bin/env ruby\nbegin\n  load File.expand_path('../spring', __FILE__)\nrescue LoadError => e\n  raise unless e.message.include?('spring')\nend\nrequire_relative '../config/boot'\nrequire 'rake'\nRake.application.run\n"
  },
  {
    "path": "bin/setup",
    "content": "#!/usr/bin/env ruby\nrequire 'pathname'\n\n# path to your application root.\nAPP_ROOT = Pathname.new File.expand_path('../../',  __FILE__)\n\nDir.chdir APP_ROOT do\n  # This script is a starting point to setup your application.\n  # Add necessary setup steps to this file:\n\n  puts \"== Installing dependencies ==\"\n  system \"gem install bundler --conservative\"\n  system \"bundle check || bundle install\"\n\n  # puts \"\\n== Copying sample files ==\"\n  # unless File.exist?(\"config/database.yml\")\n  #   system \"cp config/database.yml.sample config/database.yml\"\n  # end\n\n  puts \"\\n== Preparing database ==\"\n  system \"bin/rake db:setup\"\n\n  puts \"\\n== Removing old logs and tempfiles ==\"\n  system \"rm -f log/*\"\n  system \"rm -rf tmp/cache\"\n\n  puts \"\\n== Restarting application server ==\"\n  system \"touch tmp/restart.txt\"\nend\n"
  },
  {
    "path": "bin/spring",
    "content": "#!/usr/bin/env ruby\n\n# This file loads spring without using Bundler, in order to be fast.\n# It gets overwritten when you run the `spring binstub` command.\n\nunless defined?(Spring)\n  require 'rubygems'\n  require 'bundler'\n\n  if (match = Bundler.default_lockfile.read.match(/^GEM$.*?^    (?:  )*spring \\((.*?)\\)$.*?^$/m))\n    Gem.paths = { 'GEM_PATH' => [Bundler.bundle_path.to_s, *Gem.path].uniq.join(Gem.path_separator) }\n    gem 'spring', match[1]\n    require 'spring/binstub'\n  end\nend\n"
  },
  {
    "path": "config/application.dev.yml",
    "content": "# Email address to use in the 'from' field for emails sent by Kochiku.\nsender_email_address: 'kochiku@example.com'\n\n# Email address where kochiku should send problems with the build system (for example, errors),\n# as distinct from failures in a particular test (which go to the people who committed code).\nkochiku_notifications_email_address: 'kochiku-notifications@example.com'\n\n# Domain name to use in constructing generic addresses. For example noreply@example.com in git commits.\ndomain_name: 'example.com'\n\n# Set to true if Kochiku is served over https\nuse_https: false\n\n# Host name where Kochiku is serving web pages.\nkochiku_host: 'kochiku.example.com'\n\n# If you commit with hitch/git-pair, etc, set this in order to send email to each person in the pair.\n# For example, github+joe+bob@example.com will turn into emails to joe@example.com and bob@example.com\n# if git_pair_email_prefix is set to 'github'.\ngit_pair_email_prefix: 'github'\n\n# Mail server which will accept mail on port 25 (standard SMTP port). If you need to use another port,\n# or other settings, you currently need to edit the kochiku source (config.action_mailer settings in\n# config/environments/production.rb).\nsmtp_server: 'localhost'\n\n# Host and port to connect to for Redis communication.\nredis_host: '127.0.0.1'\nredis_port: 6379\n\n# List your git servers (at least for now, they need to be either github, github enterprise, or\n# Atlassian Stash for things like constructing URLs to pages on those servers. Would be nice to\n# just turn off the fancy features for a vanilla git server instead, but that isn't yet possible).\n# possible values for type are: github or stash\ngit_servers:\n  github.com:\n    type: github\n\n# It is highly recommended that you create an OAuth token for Kochiku on\n# Github. 
This will allow Kochiku to do many things including display build\n# status on pull requests.\n#  github.com:\n#    type: github\n#    oauth_token_file: /path/to/github_oauth_token\n\n# If you would like Kochiku to clone and fetch repositories from a git mirror\n# define the repository and fill in the url to your mirror.\n#  git.example.com:\n#    mirror: 'git://git-mirror.example.com/'\n\n# If you have multiple domains pointing at your git server then define them as aliases\n#  git.example.com:\n#    aliases:\n#      - alias.example\n#      - git.alias.com\n\n# Example of Atlassian Stash integration.\n#  stash.example.com:\n#    type: stash\n#    username: kochiku-robot\n#    password_file: config/secrets/kochiku-robot-password\n"
  },
  {
    "path": "config/application.rb",
    "content": "require File.expand_path('../boot', __FILE__)\n\nrequire 'rails/all'\n\nBundler.require(:default, Rails.env)\n\nI18n.enforce_available_locales = true\n\nmodule Kochiku\n  class Application < Rails::Application\n\n    config.generators do |g|\n      g.template_engine :haml\n      g.test_framework :rspec\n      g.helper false\n    end\n\n    # Settings in config/environments/* take precedence over those specified here.\n    # Application configuration should go into files in config/initializers\n    # -- all .rb files in that directory are automatically loaded.\n\n    # Custom directories with classes and modules you want to be autoloadable.\n    # ACHTUNG: intentionally empty to help catch missing requires for config.threadsafe!\n    # config.autoload_paths += %W()\n\n    # Only load the plugins named here, in the order given (default is alphabetical).\n    # :all can be used as a placeholder for all plugins not explicitly named.\n    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]\n\n    # Activate observers that should always be running.\n    config.active_record.observers = :repository_observer\n\n    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.\n    # Run \"rake -D time\" for a list of tasks for finding time zone names. 
Default is UTC.\n    config.time_zone = 'Pacific Time (US & Canada)'\n\n    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.\n    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]\n    # config.i18n.default_locale = :de\n\n    # Configure the default encoding used in templates for Ruby 1.9.\n    config.encoding = \"utf-8\"\n\n    # Configure sensitive parameters which will be filtered from the log file.\n    # config.filter_parameters += [:password]\n\n    # Enable escaping HTML in JSON.\n    config.active_support.escape_html_entities_in_json = true\n\n    # Use SQL instead of Active Record's schema dumper when creating the database.\n    # This is necessary if your schema can't be completely dumped by the schema dumper,\n    # like if you have constraints or database-specific column types\n    # config.active_record.schema_format = :sql\n\n    # Version of your assets, change this if you want to expire all your assets\n    config.assets.version = '1.0'\n\n    config.assets.precompile << Proc.new{ |path| !File.basename(path).starts_with?('_') }\n  end\nend\n"
  },
  {
    "path": "config/application.test.yml",
    "content": "#########################################################################\n#                                                                       #\n# This version of the application.yml is used by the Kochiku test suite #\n#                                                                       #\n#########################################################################\n\n# The descriptions for these settings are in config/application.dev.yml\n\nsender_email_address: 'kochiku@example.com'\nkochiku_notifications_email_address: 'kochiku-notifications@example.com'\ndomain_name: 'example.com'\nuse_https: false\nkochiku_host: 'kochiku.example.com'\ngit_pair_email_prefix: 'github'\nsmtp_server: 'localhost'\nredis_host: '127.0.0.1'\n\ngit_servers:\n  github.com:\n    type: github\n  git.example.com:\n    type: github\n  git.squareup.com:\n    type: github\n  stash.example.com:\n    type: stash\n"
  },
  {
    "path": "config/application.yml",
    "content": "# Place your production Kochiku application config here.\n#\n# Start by copying the contents of config/application.dev.yml and modify as\n# desired.\n"
  },
  {
    "path": "config/boot.rb",
    "content": "require 'rubygems'\n\n# Set up gems listed in the Gemfile.\nENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__)\n\nrequire 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE'])\n"
  },
  {
    "path": "config/compass.rb",
    "content": "# This configuration file works with both the Compass command line tool and within Rails.\n# Require any additional compass plugins here.\nproject_type = :rails\n\n# Set this to the root of your project when deployed:\nhttp_path = \"/\"\n\n# You can select your preferred output style here (can be overridden via the command line):\n# output_style = :expanded or :nested or :compact or :compressed\n\n# To enable relative paths to assets via compass helper functions. Uncomment:\n# relative_assets = true\n\n# To disable debugging comments that display the original location of your selectors. Uncomment:\n# line_comments = false\n\npreferred_syntax = :sass\n"
  },
  {
    "path": "config/database.production.yml.sample",
    "content": "production:\n  adapter: mysql2\n  encoding: utf8\n  reconnect: true\n  username: kochiku\n  password: the_password\n  database: kochiku\n  host: localhost\n"
  },
  {
    "path": "config/database.yml",
    "content": "development: &defaults\n  adapter: mysql2\n  encoding: utf8\n  reconnect: false\n  database: kochiku_development\n  pool: 15\n  username: root\n  password:\n\ntest: &TEST\n  <<: *defaults\n  database: kochiku_test\n  host: 127.0.0.1\n"
  },
  {
    "path": "config/deploy/production.rb",
    "content": "# Default value for default_env is {}\n# set :default_env, { path: \"/opt/ruby/bin:$PATH\" }\n\n# Server that is running the Kochiku Rails app\nserver 'kochiku.example.com', user: 'kochiku', roles: %w{web app db worker}\n"
  },
  {
    "path": "config/deploy.rb",
    "content": "# Lock version to protect against cap command being called without bundle exec\n# and executing with another version\nlock '3.4.0'\n\nset :application, \"Kochiku\"\nset :repo_url, \"https://github.com/square/kochiku.git\"\nset :user, \"kochiku\"\n\nask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp }\n\n# Default value for :format is :pretty\n# set :format, :pretty\n\n# Default value for :log_level is :debug\n# set :log_level, :debug\n\n# Default value for :pty is false\n# set :pty, true\n\nset :deploy_to, \"/app/#{fetch(:user)}/kochiku\"\nset :deploy_via, :remote_cache\nset :linked_dirs, %w{log}\n\n# Reference Capistrano's flow diagram for help choosing hooks\n# http://capistranorb.com/documentation/getting-started/flow/\nbefore \"deploy:started\", \"kochiku:setup\"\nafter  \"deploy:symlink:shared\", \"kochiku:symlinks\"\nbefore \"deploy:updated\", \"deploy:overwrite_database_yml\"\n\n# warn if a legacy deploy.custom.rb is in place\nif File.exist?(File.expand_path('deploy.custom.rb', File.dirname(__FILE__)))\n  warn \"Kochiku has upgraded to Capistrano 3. Placing custom capistrano config in deploy.custom.rb is no longer supported. Please move Capistrano settings to config/deploy/production.rb and remove deploy.custom.rb to make this message go away.\"\n  exit(1)\nend\n"
  },
  {
    "path": "config/environment.rb",
    "content": "# Load the rails application\nrequire File.expand_path('../application', __FILE__)\n\n# Load application settings for Kochiku\nrequire File.expand_path('../../lib/settings_accessor', __FILE__)\n\nCONF_FILE =\n  if Rails.env.test?\n    File.expand_path('../application.test.yml', __FILE__)\n  elsif Rails.env.development?\n    File.expand_path('../application.dev.yml', __FILE__)\n  else\n    File.expand_path('../application.yml', __FILE__)\n  end\n\nraise(\"#{CONF_FILE} is required to start Kochiku\") unless File.exist?(CONF_FILE)\n\nSettings = SettingsAccessor.new(File.read(CONF_FILE))\n\n# Disable symbol and yaml parsing in the XML parser to avoid\n# other code paths being exploited.\n# https://www.ruby-forum.com/attachment/8029/cve-2013-0156-poc.txt\nActiveSupport::XmlMini::PARSING.delete(\"symbol\")\nActiveSupport::XmlMini::PARSING.delete(\"yaml\")\n\n# Initialize the rails application\nKochiku::Application.initialize!\n"
  },
  {
    "path": "config/environments/development.rb",
    "content": "Kochiku::Application.configure do\n  # Settings specified here will take precedence over those in config/application.rb\n\n  # In the development environment your application's code is reloaded on\n  # every request.  This slows down response time but is perfect for development\n  # since you don't have to restart the webserver when you make code changes.\n  config.cache_classes = false\n\n  config.eager_load = false\n\n  # Show full error reports\n  config.consider_all_requests_local = true\n\n  # Enable page, action, and fragment caching\n  #\n  # Important to have enabled in development to keep cache related bugs from\n  # slipping through.\n  config.action_controller.perform_caching = true\n  config.cache_store = :memory_store, { size: 67108864 } # 64.megabytes\n\n  # Uncomment to use Redis caching in development\n  #\n  # config.cache_store = :readthis_store, {\n  #   expires_in: 2.days.to_i,\n  #   namespace: 'cache',\n  #   marshal: JSON,\n  #   redis: {\n  #     host: Settings.redis_host,\n  #     port: Settings.redis_port,\n  #     db: 1, # use different db than Resque\n  #     driver: :hiredis\n  #   }\n  # }\n\n  # Don't care if the mailer can't send\n  config.action_mailer.raise_delivery_errors = false\n\n  # Print deprecation notices to the Rails logger\n  config.active_support.deprecation = :log\n\n  # Raise an error on page load if there are pending migrations\n  config.active_record.migration_error = :page_load\n\n  # Debug mode disables concatenation and preprocessing of assets.\n  # This option may cause significant delays in view rendering with a large\n  # number of complex assets.\n  config.assets.debug = true\n\n  # suppress output of asset requests. Formerly handled by quiet_assets gem\n  config.assets.quiet = true\n\n  # Generate digests for assets URLs\n  # config.assets.digest = false\n\n  config.sass.preferred_syntax = :sass\n  Rails.application.routes.default_url_options[:host] = \"localhost:3000\"\n  config.action_mailer.default_url_options = {:host => \"localhost:3000\"}\n\n  config.after_initialize do\n    Bullet.enable = true\n    Bullet.bullet_logger = true\n    Bullet.console = true\n    Bullet.rails_logger = true\n\n    # Added because Branches#show.rss does not use the build_attempts but Branches#show.html does use them\n    Bullet.add_whitelist :type => :unused_eager_loading, :class_name => \"BuildPart\", :association => :build_attempts\n  end\nend\n"
  },
  {
    "path": "config/environments/production.rb",
    "content": "Kochiku::Application.configure do\n  # Settings specified here will take precedence over those in config/application.rb\n\n  # Code is not reloaded between requests\n  config.cache_classes = true\n\n  # Eager load code on boot. This eager loads most of Rails and\n  # your application in memory, allowing both thread web servers\n  # and those relying on copy on write to perform better.\n  # Rake tasks automatically ignore this option for performance.\n  config.eager_load = true\n\n  # Full error reports are disabled and caching is turned on\n  config.consider_all_requests_local = true   # internal service; safe to show errors\n  config.action_controller.perform_caching = true\n\n  config.cache_store = :readthis_store, {\n    expires_in: 2.days.to_i,\n    namespace: 'cache',\n    marshal: JSON,\n    redis: {\n      host: Settings.redis_host,\n      port: Settings.redis_port,\n      db: 1, # use different db than Resque\n      driver: :hiredis\n    }\n  }\n\n  # Disable Rails's static asset server (Apache or nginx will already do this)\n  config.serve_static_files = false\n\n  # Compress JavaScripts and CSS\n  config.assets.js_compressor = :uglifier\n\n  # Don't fallback to assets pipeline if a precompiled asset is missed\n  config.assets.compile = false\n\n  # Generate digests for assets URLs\n  config.assets.digest = true\n\n  # Specifies the header that your server uses for sending files\n  # config.action_dispatch.x_sendfile_header = \"X-Sendfile\" # for apache\n  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx\n\n  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.\n  config.force_ssl = true\n\n  # See everything in the log (default is :debug)\n  config.log_level = :info\n\n  # Prepend all log lines with the following tags\n  # config.log_tags = [ :subdomain, :uuid ]\n\n  # Use a different logger for distributed setups\n  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)\n\n  # Enable serving of images, stylesheets, and javascripts from an asset server\n  # config.action_controller.asset_host = \"http://assets.example.com\"\n\n  # Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)\n  # config.assets.precompile += %w( search.js )\n\n  # Disable delivery errors, bad email addresses will be ignored\n  # config.action_mailer.raise_delivery_errors = false\n\n  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to\n  # the I18n.default_locale when a translation can not be found)\n  config.i18n.fallbacks = true\n\n  # Send deprecation notices to registered listeners\n  config.active_support.deprecation = :notify\n\n  # Disable automatic flushing of the log to improve performance.\n  # config.autoflush_log = false\n\n  # Use default logging formatter so that PID and timestamp are not suppressed.\n  config.log_formatter = ::Logger::Formatter.new\n\n  Rails.application.routes.default_url_options[:host] = Settings.kochiku_host\n  Rails.application.routes.default_url_options[:protocol] = Settings.kochiku_protocol\n  config.action_mailer.default_url_options = {:host => Settings.kochiku_host, :protocol => Settings.kochiku_protocol}\n\n  config.action_mailer.delivery_method = :smtp\n  config.action_mailer.smtp_settings = {\n    :address => Settings.smtp_server,\n    :port => 25\n  }\nend\n"
  },
  {
    "path": "config/environments/staging.rb",
    "content": "Kochiku::Application.configure do\n  # Settings specified here will take precedence over those in config/application.rb\n\n  config.cache_classes = true\n  config.eager_load = true\n\n  config.consider_all_requests_local = true   # internal service; safe to show errors\n  config.action_controller.perform_caching = true\n\n  config.cache_store = :readthis_store, {\n    expires_in: 2.days.to_i,\n    namespace: 'cache',\n    marshal: JSON,\n    redis: {\n      host: Settings.redis_host,\n      port: Settings.redis_port,\n      db: 1, # use different db than Resque\n      driver: :hiredis\n    }\n  }\n\n  # Disable Rails's static asset server (Apache or nginx will already do this)\n  config.serve_static_files = false\n\n  # Compress JavaScripts and CSS\n  config.assets.js_compressor = :uglifier\n\n  # Don't fallback to assets pipeline if a precompiled asset is missed\n  config.assets.compile = false\n\n  # Generate digests for assets URLs\n  config.assets.digest = true\n\n  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.\n  config.force_ssl = true\n\n  # See everything in the log (default is :info)\n  # config.log_level = :debug\n\n  # Disable delivery errors, bad email addresses will be ignored\n  # config.action_mailer.raise_delivery_errors = false\n\n  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to\n  # the I18n.default_locale when a translation can not be found)\n  config.i18n.fallbacks = true\n\n  # Send deprecation notices to registered listeners\n  config.active_support.deprecation = :notify\n\n  # Disable automatic flushing of the log to improve performance.\n  # config.autoflush_log = false\n\n  # Use default logging formatter so that PID and timestamp are not suppressed.\n  config.log_formatter = ::Logger::Formatter.new\n\n  Rails.application.routes.default_url_options[:host] = Settings.kochiku_host\n  Rails.application.routes.default_url_options[:protocol] = Settings.kochiku_protocol\n  config.action_mailer.default_url_options = {:host => Settings.kochiku_host, :protocol => Settings.kochiku_protocol}\n\n  config.action_mailer.delivery_method = :smtp\n  config.action_mailer.smtp_settings = {\n    :address => Settings.smtp_server,\n    :port => 25\n  }\nend\n"
  },
  {
    "path": "config/environments/test.rb",
    "content": "Kochiku::Application.configure do\n  # Settings specified here will take precedence over those in config/application.rb\n\n  # The test environment is used exclusively to run your application's\n  # test suite.  You never need to work with it otherwise.  Remember that\n  # your test database is \"scratch space\" for the test suite and is wiped\n  # and recreated between test runs.  Don't rely on the data there!\n  config.cache_classes = true\n\n  # Do not eager load code on boot. This avoids loading your whole application\n  # just for the purpose of running a single test. If you are using a tool that\n  # preloads Rails for running tests, you may have to set it to true.\n  config.eager_load = false\n\n  # Configure static asset server for tests with Cache-Control for performance\n  config.public_file_server.enabled = true\n  config.public_file_server.headers = { 'Cache-Control' => 'public, max-age=3600' }\n\n  # Show full error reports and disable caching\n  config.consider_all_requests_local = true\n  config.action_controller.perform_caching = false\n\n  # Raise exceptions instead of rendering exception templates\n  config.action_dispatch.show_exceptions = false\n\n  # Disable request forgery protection in test environment\n  config.action_controller.allow_forgery_protection = false\n\n  # Tell Action Mailer not to deliver emails to the real world.\n  # The :test delivery method accumulates sent emails in the\n  # ActionMailer::Base.deliveries array.\n  config.action_mailer.delivery_method = :test\n\n  # Print deprecation notices to the stderr\n  config.active_support.deprecation = :stderr\n\n  config.after_initialize do\n    Resque.redis.namespace = \"resque:kochiku:test\"\n  end\n  Rails.application.routes.default_url_options[:host] = \"localhost:3001\"\n  config.action_mailer.default_url_options = {:host => \"localhost:3000\"}\nend\n"
  },
  {
    "path": "config/initializers/backtrace_silencers.rb",
    "content": "# Be sure to restart your server when you modify this file.\n\n# You can add backtrace silencers for libraries that you're using but don't wish to see in your backtraces.\n# Rails.backtrace_cleaner.add_silencer { |line| line =~ /my_noisy_library/ }\n\n# You can also remove all the silencers if you're trying to debug a problem that might stem from framework code.\n# Rails.backtrace_cleaner.remove_silencers!\n"
  },
  {
    "path": "config/initializers/cocaine.rb",
    "content": "Cocaine::CommandLine.logger = Rails.logger\n"
  },
  {
    "path": "config/initializers/inflections.rb",
    "content": "# Be sure to restart your server when you modify this file.\n\n# Add new inflection rules using the following format\n# (all these examples are active by default):\n# ActiveSupport::Inflector.inflections do |inflect|\n#   inflect.plural /^(ox)$/i, '\\1en'\n#   inflect.singular /^(ox)en/i, '\\1'\n#   inflect.irregular 'person', 'people'\n#   inflect.uncountable %w( fish sheep )\n# end\n#\n# These inflection rules are supported but not enabled by default:\n# ActiveSupport::Inflector.inflections do |inflect|\n#   inflect.acronym 'RESTful'\n# end\n"
  },
  {
    "path": "config/initializers/load_build_strategy.rb",
    "content": "if Rails.env.test? || Rails.env.development?\n  require 'build_strategies/no_op_build_strategy'\nelse\n  require 'build_strategies/production_build_strategy'\nend\n"
  },
  {
    "path": "config/initializers/mime_types.rb",
    "content": "# Be sure to restart your server when you modify this file.\n\n# Add new mime types for use in respond_to blocks:\n# Mime::Type.register \"text/richtext\", :rtf\n# Mime::Type.register_alias \"text/html\", :iphone\n"
  },
  {
    "path": "config/initializers/readthis.rb",
    "content": "if Rails.env.staging? || Rails.env.production?\n  # Allow Rails to continue serving requests if Redis crashes\n  # https://github.com/sorentwo/readthis#fault-tolerance\n  Readthis.fault_tolerant = true\nend\n"
  },
  {
    "path": "config/initializers/redis.rb",
    "content": "REDIS = Redis.new(\n  host: Settings.redis_host,\n  port: Settings.redis_port\n)\n"
  },
  {
    "path": "config/initializers/resque.rb",
    "content": "require 'resque-scheduler'\nrequire 'resque-retry'\nrequire 'resque/failure/redis'\nrequire 'resque-retry/server'\n\nResque.redis = REDIS\nResque.redis.namespace = \"resque:kochiku\"\n\n# Necessary to specify the schedule file here for the scheduled jobs to appear\n# in the resque-web UI\nResque.schedule = YAML.load_file('config/resque_schedule.yml')\n\nResque::Failure::MultipleWithRetrySuppression.classes = [Resque::Failure::Redis]\nResque::Failure.backend = Resque::Failure::MultipleWithRetrySuppression\n"
  },
  {
    "path": "config/initializers/secret_token.rb",
    "content": "# Be sure to restart your server when you modify this file.\n\n# Your secret key is used for verifying the integrity of signed cookies.\n# If you change this key, all old signed cookies will become invalid!\n\n# Make sure the secret is at least 30 characters and all random,\n# no regular words or you'll be exposed to dictionary attacks.\n\n# Use a random hex since so we don't currently use cookies for anything in\n# Kochiku\nKochiku::Application.config.secret_key_base = SecureRandom.hex(64)\n"
  },
  {
    "path": "config/initializers/session_store.rb",
    "content": "# Be sure to restart your server when you modify this file.\n\nKochiku::Application.config.session_store :cookie_store, :key => '_kochiku_session'\n\n# Use the database for sessions instead of the cookie-based default,\n# which shouldn't be used to store highly confidential information\n# (create the session table with \"rails generate session_migration\")\n# Kochiku::Application.config.session_store :active_record_store\n"
  },
  {
    "path": "config/initializers/wrap_parameters.rb",
    "content": "# Be sure to restart your server when you modify this file.\n#\n# This file contains settings for ActionController::ParamsWrapper which\n# is enabled by default.\n\n# Enable parameter wrapping for JSON. You can disable this by setting :format to an empty array.\nActiveSupport.on_load(:action_controller) do\n  wrap_parameters :format => []\nend\n\nActiveSupport.on_load(:active_record) do\n  # The new default is false but Kochiku started when it was true\n  self.include_root_in_json = true\nend\n"
  },
  {
    "path": "config/kochiku.yml",
    "content": "test_command: 'script/ci'\n\nruby:\n - 2.4.3\n\ntargets:\n  - type: spec\n    glob: spec/**/*_spec.rb\n    workers: 1\n"
  },
  {
    "path": "config/kochiku.yml.sample",
    "content": "# By listing ruby versions, all of you tests can be run against multiple versions\nruby:\n  - 2.1.2\n\n# You can list additional log files to be uploaded by the workers\n# the stdout from your build will always be uploaded\nlog_file_globs:\n  - log/test.log\n  - myLogs/*.log\n\n# Your test command should be specified here\ntest_command: script/ci\n\n# You can create a script to be run inside your repo after a green build\non_success_script: script/success\n\n# Listing targets helps kochiku shard your build\ntargets:\n  - type: spec\n    glob: spec/**/*_spec.rb\n    workers: 1\n    # If you have multiple workers, you can set a balance strategy\n    balance: round_robin # is the default strategy\n    # Creating a time manifest helps kochiku better partition the target\n    time_manifest: config/ci/time_manifest.yml # requires round_robin balance\n    # Listing log files for a target overrides the global log files\n    log_file_globs:\n      - log/*.html\n"
  },
  {
    "path": "config/locales/en.yml",
    "content": "# Sample localization file for English. Add more files in this directory for other locales.\n# See https://github.com/svenfuchs/rails-i18n/tree/master/rails%2Flocale for starting points.\n\nen:\n  hello: \"Hello world\"\n"
  },
  {
    "path": "config/resque_schedule.yml",
    "content": "poll_repositories_for_changes:\n  every:\n    - \"10m\"\n    - :first_in: \"5s\"\n  class: \"PollRepositoriesJob\"\n  queue: low\n  args:\n  description: \"Fetches any missed changes from added repositories\"\n\nenforce_timeouts_on_attempts:\n  every:\n    - \"5m\"\n    - :first_in: \"5m\"\n  class: \"EnforceTimeoutsJob\"\n  queue: low\n  args:\n  description: \"Errors any attempts where the workers should have timed out\"\n"
  },
  {
    "path": "config/routes.rb",
    "content": "require 'resque/server'\n\nKochiku::Application.routes.draw do\n  mount Resque::Server.new, :at => '/resque'\n\n  if Rails.env.development?\n    # https://github.com/rails/rails/pull/17896\n    get '/rails/mailers' => \"rails/mailers#index\"\n    get '/rails/mailers/*path' => \"rails/mailers#preview\"\n  end\n\n  root :to => \"repositories#dashboard\"\n\n  get '/_status' => \"status#available\"\n\n  # /repositories/1/build-ref?ref=master&sha=abc123\n  resources :repositories, only: [:index, :create, :new, :update, :destroy] do\n    member do\n      post \"build-ref\", :action => 'build_ref', :as => 'build_ref'\n    end\n  end\n\n  match '/XmlStatusReport.aspx', to: \"branches#status_report\", defaults: {:format => 'xml'}, via: :get\n  match '/worker_health', to: \"dashboards#build_history_by_worker\", via: :get, as: :build_history_by_worker\n\n  match 'builds/:id' => \"builds#build_redirect\", :via => :get, :as => :build_redirect, :id => /\\d+/\n  match 'builds/:id/status' => \"builds#build_status\", :via => :get, :as => :build_status, :id => /\\d+/, :defaults => { :format => 'json' }\n  match 'builds/:ref' => \"builds#build_ref_redirect\", :via => :get, :as => :build_ref_redirect\n  match '/build_attempts/:build_attempt_id/build_artifacts' => \"build_artifacts#create\", :via => :post\n  match '/build_attempts/:id/start' => \"build_attempts#start\", :via => :post\n  match '/build_attempts/:id/finish' => \"build_attempts#finish\", :via => :post, :as => :finish_build_attempt\n  # left here for backward compatibility in case if anyone uses it. /build_attempts/:id should be used instead.\n  match '/build_attempts/:id/build_part' => \"build_attempts#show\", :via => :get, :as => :build_part_redirect\n  match '/build_attempts/:id/stream_logs' => \"build_attempts#stream_logs\", :via => :get, :as => :stream_logs\n  match '/build_attempts/:id/stream_logs_chunk' => \"build_attempts#stream_logs_chunk\", :via => :get, :as => :stream_logs_chunk\n  match '/pull-request-builder' => \"pull_requests#build\", :via => :post, :as => :pull_request_build\n  get 'badge/*repository_path', to: 'branches#badge'\n\n  # Redirects for legacy urls\n  get '/projects/:project_id/builds/:build_id', to: redirect('/builds/%{build_id}')\n\n  resources :build_artifacts, :only => [:show]\n  resources :builds, only: [:create]\n  resources :build_attempts, only: [:show]\n\n  scope path: \"*repository_path\", as: 'repository', constraints: { repository_path: /[^\\/]+\\/[^\\/]+/ }, format: false do\n    get 'edit', to: 'repositories#edit'\n\n    resources :builds, only: [:show] do\n      post 'toggle-merge-on-success', :action => \"toggle_merge_on_success\", :on => :member, :as => :toggle_merge_on_success\n      patch 'abort', :action => \"abort\", :on => :member\n      get 'status', :action => \"build_status\", :on => :member, :defaults => { :format => 'json' }\n      post 'rebuild-failed-parts', :action => \"rebuild_failed_parts\", :on => :member, :as => :rebuild_failed_parts\n      post 'retry-partitioning', :action => \"retry_partitioning\", :on => :member, :as => :retry_partitioning\n      get 'modified_time', :action => \"modified_time\", :on => :member, :defaults => { :format => 'json' }\n      get 'refresh_build_part_info', :action => \"refresh_build_part_info\", :on => :member, :defaults => { :format => 'json' }\n      post 'resend-status', :action => \"resend_status\",  :on => :member, :defaults => { :format => 'json' }\n\n      resources :build_parts, as: 'parts', path: 'parts', only: [:show] do\n        post 'rebuild', on: :member\n        get 'modified_time', action: \"modified_time\", on: :member, defaults: { format: 'json' }\n        get 'refresh_build_part_info', :action => \"refresh_build_part_info\", :on => :member, :defaults => { :format => 'json' }\n      end\n    end\n\n    # override branch id to allow branch name to contain both slashes and dots\n    resources :branches, path: \"\", only: [:index, :show], constraints: { id: /.+/ } do\n      member do\n        post 'request-new-build', action: \"request_new_build\"\n        get 'build-time-history', action: \"build_time_history\", defaults: { format: 'json' }\n        get 'health', action: 'health'\n      end\n      get 'status-report', action: \"status_report\", on: :collection\n    end\n  end\nend\n"
  },
  {
    "path": "config.ru",
    "content": "# This file is used by Rack-based servers to start the application.\n\nrequire ::File.expand_path('../config/environment',  __FILE__)\nrun Kochiku::Application\n"
  },
  {
    "path": "db/migrate/20110621212000_create_schema.rb",
    "content": "class CreateSchema < ActiveRecord::Migration[5.0]\n  def self.up\n    create_table :builds do |t|\n      t.string :sha\n      t.string :state\n      t.string :queue\n\n      t.timestamps(null: false)\n    end\n\n    create_table :build_parts do |t|\n      t.integer :build_id\n      t.string :kind\n      t.text :paths\n\n      t.timestamps(null: false)\n    end\n\n    create_table :build_part_results do |t|\n      t.integer :build_part_id\n      t.datetime :started_at\n      t.datetime :finished_at\n      t.string :builder\n      t.string :result\n\n      t.timestamps(null: false)\n    end\n\n    create_table :build_artifacts do |t|\n      t.integer :build_part_result_id\n      t.string :type\n      t.text :content\n\n      t.timestamps(null: false)\n    end\n  end\n\n  def self.down\n    drop_table :build_artifacts\n    drop_table :build_part_results\n    drop_table :build_parts\n    drop_table :builds\n  end\nend\n"
  },
  {
    "path": "db/migrate/20110624003418_change_artifact_type_to_name.rb",
    "content": "class ChangeArtifactTypeToName < ActiveRecord::Migration[5.0]\n  def self.up\n    rename_column :build_artifacts, :type, :name\n  end\n\n  def self.down\n    rename_column :build_artifacts, :name, :type\n  end\nend\n"
  },
  {
    "path": "db/migrate/20110624015709_rename_build_part_result_result_to_state.rb",
    "content": "class RenameBuildPartResultResultToState < ActiveRecord::Migration[5.0]\n  def self.up\n    rename_column :build_part_results, :result, :state\n  end\n\n  def self.down\n    rename_column :build_part_results, :state, :result\n  end\nend\n"
  },
  {
    "path": "db/migrate/20110708203120_change_build_artifacts_for_carrier_wave.rb",
    "content": "class ChangeBuildArtifactsForCarrierWave < ActiveRecord::Migration[5.0]\n  def self.up\n    rename_column :build_artifacts, :name, :log_file\n    remove_column :build_artifacts, :content\n  end\n\n  def self.down\n    add_column    :build_artifacts, :content, :text\n    rename_column :build_artifacts, :log_file, :name\n  end\nend\n"
  },
  {
    "path": "db/migrate/20110713175724_rename_build_part_result_to_build_part_run.rb",
    "content": "class RenameBuildPartResultToBuildPartRun < ActiveRecord::Migration[5.0]\n  def self.up\n    rename_table :build_part_results, :build_attempts\n    rename_column :build_artifacts, :build_part_result_id, :build_attempt_id\n  end\n\n  def self.down\n    rename_column :build_artifacts, :build_attempt_id, :build_part_result_id\n    rename_table :build_attempts, :build_part_results\n  end\nend\n"
  },
  {
    "path": "db/migrate/20110713191536_add_foreign_key_indexes.rb",
    "content": "class AddForeignKeyIndexes < ActiveRecord::Migration[5.0]\n  def self.up\n    add_index :build_parts, :build_id\n    add_index :build_attempts, :build_part_id\n    add_index :build_artifacts, :build_attempt_id\n  end\n\n  def self.down\n    remove_index :build_parts, column: :build_id\n    remove_index :build_attempts, column: :build_part_id\n    remove_index :build_artifacts, column: :build_attempt_id\n  end\nend\n"
  },
  {
    "path": "db/migrate/20110719204508_create_projects.rb",
    "content": "class CreateProjects < ActiveRecord::Migration[5.0]\n  def self.up\n    create_table :projects do |t|\n      t.string :name\n      t.string :branch\n\n      t.timestamps(null: false)\n    end\n    add_index :projects, [:name, :branch]\n  end\n\n  def self.down\n    drop_table :projects\n  end\nend\n"
  },
  {
    "path": "db/migrate/20110719205413_add_project_id_to_builds.rb",
    "content": "class AddProjectIdToBuilds < ActiveRecord::Migration[5.0]\n  def self.up\n    add_column :builds, :project_id, :integer\n    add_index  :builds, :project_id\n  end\n\n  def self.down\n    remove_index  :builds, column: :project_id\n    remove_column :builds, :project_id\n  end\nend\n"
  },
  {
    "path": "db/migrate/20110721185201_rename_builds_sha_to_ref.rb",
    "content": "class RenameBuildsShaToRef < ActiveRecord::Migration[5.0]\n  def self.up\n    rename_column :builds, :sha, :ref\n  end\n\n  def self.down\n    rename_column :builds, :ref, :sha\n  end\nend\n"
  },
  {
    "path": "db/migrate/20110801215540_rename_error_state_to_errored.rb",
    "content": "class RenameErrorStateToErrored < ActiveRecord::Migration[5.0]\n  def self.up\n    execute(\"UPDATE build_attempts SET state='errored' WHERE state='error'\")\n    execute(\"UPDATE builds SET state='errored' WHERE state='error'\")\n  end\n\n  def self.down\n    execute(\"UPDATE builds SET state='error' WHERE state='errored'\")\n    execute(\"UPDATE build_attempts SET state='error' WHERE state='errored'\")\n  end\nend\n"
  },
  {
    "path": "db/migrate/20120803005242_add_merge_bool_to_build.rb",
    "content": "class AddMergeBoolToBuild < ActiveRecord::Migration[5.0]\n  def change\n    add_column :builds, :auto_merge, :boolean\n  end\nend\n"
  },
  {
    "path": "db/migrate/20120817225343_add_branch_to_build.rb",
    "content": "class AddBranchToBuild < ActiveRecord::Migration[5.0]\n  def change\n    add_column :builds, :branch, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121008211955_create_repositories.rb",
    "content": "class CreateRepositories < ActiveRecord::Migration[5.0]\n  def change\n    create_table :repositories do |t|\n      t.string :url\n      t.string :test_command\n      t.text :options\n      t.timestamps(null: false)\n    end\n    add_index :repositories, :url\n    add_column :projects, :repository_id, :integer\n    add_index :projects, :repository_id\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121017173936_add_github_repository_id_to_repository.rb",
    "content": "class AddGithubRepositoryIdToRepository < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :github_post_receive_hook_id, :integer\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121017182543_fix_repository_schema.rb",
    "content": "class FixRepositorySchema < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :run_ci, :boolean\n    add_column :repositories, :use_branches_on_green, :boolean\n    add_column :repositories, :build_pull_requests, :boolean\n    add_column :repositories, :on_green_update, :string\n    add_column :repositories, :use_spec_and_ci_queues, :boolean\n    add_column :repositories, :repo_cache_dir, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121017184946_remove_options_from_repository.rb",
    "content": "class RemoveOptionsFromRepository < ActiveRecord::Migration[5.0]\n  def change\n    remove_column :repositories, :options\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121017222538_add_target_name_to_builds.rb",
    "content": "class AddTargetNameToBuilds < ActiveRecord::Migration[5.0]\n  def change\n    add_column :builds, :target_name, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121017224003_add_command_flag_to_repositories.rb",
    "content": "class AddCommandFlagToRepositories < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :command_flag, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121018182435_add_options_to_build_part.rb",
    "content": "class AddOptionsToBuildPart < ActiveRecord::Migration[5.0]\n  def change\n    add_column :build_parts, :options, :text\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121024005715_add_send_build_failure_email_to_repository.rb",
    "content": "class AddSendBuildFailureEmailToRepository < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :send_build_failure_email, :boolean, :default => true\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121024164929_record_build_failure_email_sent.rb",
    "content": "class RecordBuildFailureEmailSent < ActiveRecord::Migration[5.0]\n  def change\n    add_column :builds, :build_failure_email_sent, :boolean\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121024210129_add_success_script_to_repositories.rb",
    "content": "class AddSuccessScriptToRepositories < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :on_success_script, :string\n    add_column :builds, :promoted, :boolean\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121024212949_add_on_success_log_file_to_build.rb",
    "content": "class AddOnSuccessLogFileToBuild < ActiveRecord::Migration[5.0]\n  def change\n    add_column :builds, :on_success_script_log_file, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121030213442_add_queue_to_repository.rb",
    "content": "class AddQueueToRepository < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :queue_override, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20121101220831_add_timeout_to_repository.rb",
    "content": "class AddTimeoutToRepository < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :timeout, :integer, :default => 40\n  end\nend\n"
  },
  {
    "path": "db/migrate/20130226232844_add_index_to_build_ref.rb",
    "content": "class AddIndexToBuildRef < ActiveRecord::Migration[5.0]\n  def change\n    add_index :builds, :ref\n  end\nend\n"
  },
  {
    "path": "db/migrate/20130409144945_add_on_success_note_to_repositories.rb",
    "content": "class AddOnSuccessNoteToRepositories < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :on_success_note, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20130511012855_add_deployable_map_to_build.rb",
    "content": "class AddDeployableMapToBuild < ActiveRecord::Migration[5.0]\n  def change\n    add_column :builds, :deployable_map, :text\n  end\nend\n"
  },
  {
    "path": "db/migrate/20130626183046_add_maven_modules_to_build.rb",
    "content": "class AddMavenModulesToBuild < ActiveRecord::Migration[5.0]\n  def change\n    add_column :builds, :maven_modules, :text\n  end\nend\n"
  },
  {
    "path": "db/migrate/20130627194433_add_index_to_build_part_paths.rb",
    "content": "class AddIndexToBuildPartPaths < ActiveRecord::Migration[5.0]\n  def change\n    add_index :build_parts, :paths, :length => {:paths => 255}\n  end\nend\n"
  },
  {
    "path": "db/migrate/20130709123456_add_upload_artifacts_to_build_parts.rb",
    "content": "class AddUploadArtifactsToBuildParts < ActiveRecord::Migration[5.0]\n  def change\n    add_column :build_parts, :upload_artifacts, :boolean\n  end\nend\n"
  },
  {
    "path": "db/migrate/20130822191419_add_queue_to_build_part.rb",
    "content": "class AddQueueToBuildPart < ActiveRecord::Migration[5.0]\n  def up\n    add_column :build_parts, :queue, :string\n\n    execute(\"UPDATE build_parts,builds SET build_parts.queue = builds.queue WHERE builds.id = build_parts.build_id\")\n\n    remove_column :builds, :queue\n    remove_column :repositories, :use_spec_and_ci_queues\n  end\n\n  def down\n    add_column :repositories, :use_spec_and_ci_queues, :boolean\n    add_column :builds, :queue, :string\n    remove_column :build_parts, :queue\n  end\nend\n"
  },
  {
    "path": "db/migrate/20130822231850_remove_upload_artifacts_from_build_parts.rb",
    "content": "class RemoveUploadArtifactsFromBuildParts < ActiveRecord::Migration[5.0]\n  def up\n    remove_column :build_parts, :upload_artifacts\n  end\n\n  def down\n    add_column :build_parts, :upload_artifacts, :boolean\n  end\nend\n"
  },
  {
    "path": "db/migrate/20130823210844_add_retry_count_to_build_part.rb",
    "content": "class AddRetryCountToBuildPart < ActiveRecord::Migration[5.0]\n  def change\n    add_column :build_parts, :retry_count, :integer, default: 0\n  end\nend\n"
  },
  {
    "path": "db/migrate/20130823231854_remove_java_specific_stuff.rb",
    "content": "class RemoveJavaSpecificStuff < ActiveRecord::Migration[5.1]\n  def up\n    remove_column :builds, :deployable_map\n    remove_column :builds, :maven_modules\n  end\n\n  def down\n    add_column :builds, :deployable_map, :text\n    add_column :builds, :maven_modules, :text\n  end\nend\n"
  },
  {
    "path": "db/migrate/20130823234546_remove_queue_override_from_repositories.rb",
    "content": "class RemoveQueueOverrideFromRepositories < ActiveRecord::Migration[5.0]\n  def up\n    remove_column :repositories, :queue_override\n  end\n\n  def down\n    add_column :repositories, :queue_override, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20130910190203_add_repository_name_as_column.rb",
    "content": "class AddRepositoryNameAsColumn < ActiveRecord::Migration[5.0]\n  Rails.logger = Logger.new(STDOUT)\n\n  URL_PARSERS = {\n    \"git@\" => /@(.*):(.*)\\/(.*)\\.git/,\n    \"git:\" => /:\\/\\/(.*)\\/(.*)\\/(.*)\\.git/,\n    \"http\" => /https?:\\/\\/(.*)\\/(.*)\\/([^.]*)\\.?/,\n    'ssh:' => %r{ssh://git@(.*):(\\d+)/(.*)/([^.]+)\\.git}\n  }.freeze\n\n  class Repository < ActiveRecord::Base\n  end\n\n  def project_params(url)\n    # TODO: Use the parsers in the RemoteServer classes.\n    parser = URL_PARSERS[url.slice(0,4)]\n    match = url.match(parser)\n\n    if match.length > 4\n      {\n        host:       match[1],\n        port:       match[2].to_i,\n        username:   match[3],\n        repository: match[4]\n      }\n    else\n      {\n        host:       match[1],\n        username:   match[2],\n        repository: match[3]\n      }\n    end\n  end\n\n  def old_style_repository_name(url)\n    project_params(url)[:repository]\n  end\n\n  def up\n    add_column :repositories, :repository_name, :string\n\n    Repository.all.each do |repository|\n      repository.update_attribute(:repository_name, old_style_repository_name(repository.url))\n    end\n\n    repository_count = Repository.all.each_with_object({}) do |repository, duplicates|\n      duplicates[repository.repository_name] ||= 0\n      duplicates[repository.repository_name] += 1\n      duplicates\n    end\n\n    duplicates = repository_count.select { |name, count| count > 1 }\n    if duplicates.any?\n      Rails.logger.warn(\"\")\n      Rails.logger.warn(\"\")\n      Rails.logger.warn((\"*\" * 80))\n      Rails.logger.warn(\"Duplicate repositories detected.\")\n    end\n    duplicates.each do |name, count|\n      Rails.logger.warn(\"Found #{count} repositories named #{name}. Please rename them.\")\n    end\n  end\n\n  def down\n    remove_column :repositories, :repository_name\n  end\nend\n"
  },
  {
    "path": "db/migrate/20131217022000_add_error_text_to_build.rb",
    "content": "class AddErrorTextToBuild < ActiveRecord::Migration[5.0]\n  def change\n    add_column :builds, :error_details, :text\n  end\nend\n"
  },
  {
    "path": "db/migrate/20140123234208_add_allows_kochiku_merges_to_repository.rb",
    "content": "class AddAllowsKochikuMergesToRepository < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :allows_kochiku_merges, :boolean, default: true\n  end\nend\n"
  },
  {
    "path": "db/migrate/20140128180258_rename_auto_merge_on_build.rb",
    "content": "class RenameAutoMergeOnBuild < ActiveRecord::Migration[5.0]\n  def change\n    rename_column :builds, :auto_merge, :merge_on_success\n  end\nend\n"
  },
  {
    "path": "db/migrate/20140415001051_remove_use_branches_on_green_from_repositories.rb",
    "content": "class RemoveUseBranchesOnGreenFromRepositories < ActiveRecord::Migration[5.0]\n  def change\n    remove_column :repositories, :use_branches_on_green, :boolean\n  end\nend\n"
  },
  {
    "path": "db/migrate/20140415011144_remove_command_flag_from_repositories.rb",
    "content": "class RemoveCommandFlagFromRepositories < ActiveRecord::Migration[5.0]\n  def change\n    remove_column :repositories, :command_flag, :string\n    remove_column :builds,       :target_name,  :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20140506012721_unique_index_on_builds_ref.rb",
    "content": "class UniqueIndexOnBuildsRef < ActiveRecord::Migration[5.0]\n  def up\n    remove_index :builds, column: :ref\n\n    # set length to 40 characters and add not null constraint\n    change_column :builds, :ref, :string, { limit: 40, null: false }\n\n    add_index :builds, [:ref, :project_id], :unique => true\n  end\n\n  def down\n    remove_index :builds, column: [:ref, :project_id]\n\n    change_column :builds, :ref, :string\n\n    add_index :builds, :ref\n  end\nend\n"
  },
  {
    "path": "db/migrate/20140507184819_add_host_and_namespace_to_repositories.rb",
    "content": "class AddHostAndNamespaceToRepositories < ActiveRecord::Migration[5.0]\n  def up\n    rename_column :repositories, :repository_name, :name\n    change_column :repositories, :name, :string, null: false  # add not null constraint\n    add_column :repositories, :host, :string, null: false\n    add_column :repositories, :namespace, :string, null: true  # generic git servers will not have a namespace\n\n    add_index :repositories, [:host, :namespace, :name],\n              name: 'index_repositories_on_host_and_namespace_and_name',\n              unique: true\n\n    Repository.all.each do |repository|\n      attributes = RemoteServer.for_url(repository.url).attributes\n      repository.update_attributes!(\n        :host => attributes.fetch(:host),\n        :namespace => attributes.fetch(:repository_namespace)\n      )\n    end\n  end\n\n  def down\n    remove_index :repositories, name: 'index_repositories_on_host_and_namespace_and_name'\n    remove_columns :repositories, :namespace, :host\n    change_column :repositories, :name, :string, null: true\n    rename_column :repositories, :name, :repository_name\n  end\nend\n"
  },
  {
    "path": "db/migrate/20140617214701_add_success_email.rb",
    "content": "class AddSuccessEmail < ActiveRecord::Migration[5.0]\n  def change\n    add_column :builds, :build_success_email_sent, :boolean, :default => false, :null => false\n    add_column :repositories, :send_build_success_email, :boolean, :default => true, :null => false\n    reversible do |dir|\n      change_table :builds do |t|\n        dir.up do\n          execute 'UPDATE builds SET build_failure_email_sent = 0 WHERE build_failure_email_sent IS NULL'\n          t.change :build_failure_email_sent, :boolean, :default => false, :null => false\n        end\n        dir.down { t.change :build_failure_email_sent, :boolean, :default => nil, :null => true }\n      end\n      change_table :repositories do |t|\n        dir.up do\n          execute 'UPDATE repositories SET send_build_failure_email = 1 WHERE send_build_failure_email IS NULL'\n          t.change :send_build_failure_email, :boolean, :default => true, :null => false\n        end\n        dir.down { t.change :send_build_failure_email, :boolean, :default => true, :null => true }\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "db/migrate/20140715225910_remove_notes.rb",
    "content": "class RemoveNotes < ActiveRecord::Migration[5.0]\n  def change\n    remove_column :repositories, :on_success_note, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20141031234747_add_email_first_failure_to_repositories.rb",
    "content": "class AddEmailFirstFailureToRepositories < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :email_on_first_failure, :boolean, default: false, null: false\n  end\nend\n"
  },
  {
    "path": "db/migrate/20150324001246_remove_on_success_script_from_repositories.rb",
    "content": "class RemoveOnSuccessScriptFromRepositories < ActiveRecord::Migration[5.0]\n  def up\n    # Guard against deleting any data\n    rows_with_old_data = select_value(\"select count(*) from repositories where on_success_script IS NOT NULL AND on_success_script != ''\")\n\n    if rows_with_old_data > 0\n      err_message = <<-ERR_MESSAGE\n        \"Found #{rows_with_old_data} rows in the Repositories table with non-empty values\"\n        \"for `on_success_script`.\"\n\n        \"Kochiku no longer supports on_success_script inside of the repository table.\"\n        \"The new location is inside of each project's kochiku.yml file.\"\n\n        \"Please remove the data from the on_success_script column and re-run this migration.\"\n      ERR_MESSAGE\n\n      Rails.logger.error(err_message)\n      exit(1)\n    end\n\n    remove_column :repositories, :on_success_script, :string\n  end\n\n  def down\n    add_column :repositories, :on_success_script, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20150331160909_add_send_merge_successful_email.rb",
    "content": "class AddSendMergeSuccessfulEmail < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :send_merge_successful_email, :boolean, default: true, null: false\n  end\nend\n"
  },
  {
    "path": "db/migrate/20150714234635_add_log_port_to_build_attempt.rb",
    "content": "class AddLogPortToBuildAttempt < ActiveRecord::Migration[5.0]\n  def change\n    add_column :build_attempts, :log_streamer_port, :integer\n  end\nend\n"
  },
  {
    "path": "db/migrate/20150717214656_create_branches.rb",
    "content": "class CreateBranches < ActiveRecord::Migration[5.0]\n  def change\n    create_table :branches do |t|\n      t.references :repository, null: false\n      t.string :name, null: false\n      t.boolean :convergence, null: false, default: false\n      t.timestamps null: false\n\n      t.index([:repository_id, :name], unique: true)\n      t.index(:convergence)\n    end\n\n    change_table :builds do |t|\n      t.references :branch, index: true\n      t.index([:ref, :branch_id], unique: true)\n    end\n  end\nend\n"
  },
  {
    "path": "db/migrate/20150717220149_assign_builds_to_branches.rb",
    "content": "# In this migration intentionally go out of our way to not use the Project\n# model so that it can be removed from the codebase.\nclass AssignBuildsToBranches < ActiveRecord::Migration[5.0]\n  def up\n    # Be mindful of build.branch versus build#branch_id\n    # `build.branch` is a reference to the name of the branch as a string\n    Build.where(branch_id: nil).find_each do |build|\n      repository_id = connection.select_value(\"SELECT repository_id FROM projects WHERE id = #{build.project_id}\")\n\n      if repository_id.nil?\n        Rails.logger.error \"skipping Build #{build.id} because its project or repository not longer exists\"\n        next\n      end\n\n      branch_record = if build.branch.present?\n                        Branch.find_or_create_by(repository_id: repository_id, name: build.branch)\n                      else\n                        Branch.find_or_create_by(repository_id: repository_id, name: \"unknown\")\n                      end\n\n      build.update_column(:branch_id, branch_record.id)\n    end\n\n    # Automatically set all master branches as convergence branches to maintain\n    # previous behavior.\n    Branch.where(name: 'master').update_all(convergence: true)\n\n    # the 'branch' column is removed from builds in the next migration\n  end\n\n  def down\n  end\nend\n"
  },
  {
    "path": "db/migrate/20150717231250_remove_branch_string_from_builds.rb",
    "content": "class RemoveBranchStringFromBuilds < ActiveRecord::Migration[5.0]\n  def change\n    # The previous migration (AssignBuildsToBranches) mapped branch_id on\n    # builds to the newly introduced Branch records. With that complete it is\n    # safe to remove the legacy branch (string) column from builds.\n    remove_column :builds, :branch, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20150719130110_index_repositories_namespace_and_name.rb",
    "content": "class IndexRepositoriesNamespaceAndName < ActiveRecord::Migration[5.0]\n  def change\n    add_index :repositories, [:namespace, :name], unique: true\n  end\nend\n"
  },
  {
    "path": "db/migrate/20151111080255_remove_repo_cache_dir_from_repositories.rb",
    "content": "class RemoveRepoCacheDirFromRepositories < ActiveRecord::Migration[5.0]\n  def change\n    remove_column :repositories, :repo_cache_dir, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20151114185514_fix_convergence_index.rb",
    "content": "# In order for the index on convergence col to be useful it needs to be\n# namespaced by repository_id\nclass FixConvergenceIndex < ActiveRecord::Migration[5.0]\n  def change\n    # Add the new index first to avoid killing performance\n    add_index :branches, [:repository_id, :convergence]\n    remove_index :branches, column: :convergence\n  end\nend\n"
  },
  {
    "path": "db/migrate/20160408214135_index_created_at_on_build_attempts.rb",
    "content": "class IndexCreatedAtOnBuildAttempts < ActiveRecord::Migration[5.0]\n  def change\n    add_index :build_attempts, :created_at\n  end\nend\n"
  },
  {
    "path": "db/migrate/20170804214538_add_enabled_bool_to_repositories.rb",
    "content": "class AddEnabledBoolToRepositories < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :enabled, :boolean, default: true, null: false\n  end\nend\n"
  },
  {
    "path": "db/migrate/20180208202524_add_test_command_to_builds.rb",
    "content": "class AddTestCommandToBuilds < ActiveRecord::Migration[5.0]\n  def change\n    add_column :builds, :test_command, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20180220185338_add_assume_lost_after_to_repository.rb",
    "content": "class AddAssumeLostAfterToRepository < ActiveRecord::Migration[5.0]\n  def change\n    add_column :repositories, :assume_lost_after, :integer\n  end\nend\n"
  },
  {
    "path": "db/migrate/20180227222254_add_initiated_by_to_builds.rb",
    "content": "class AddInitiatedByToBuilds < ActiveRecord::Migration[5.0]\n  def change\n    add_column :builds, :initiated_by, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20180301221320_add_instance_type_to_build_attempts.rb",
    "content": "class AddInstanceTypeToBuildAttempts < ActiveRecord::Migration[5.0]\n  def change\n    add_column :build_attempts, :instance_type, :string\n  end\nend\n"
  },
  {
    "path": "db/migrate/20180619210823_add_kochiku_yml_config_to_builds.rb",
    "content": "class AddKochikuYmlConfigToBuilds < ActiveRecord::Migration[5.1]\n  def change\n    add_column :builds, :kochiku_yml_config, :text\n  end\nend\n"
  },
  {
    "path": "db/schema.rb",
    "content": "# This file is auto-generated from the current state of the database. Instead\n# of editing this file, please use the migrations feature of Active Record to\n# incrementally modify your database, and then regenerate this schema definition.\n#\n# Note that this schema.rb definition is the authoritative source for your\n# database schema. If you need to create the application database on another\n# system, you should be using db:schema:load, not running all the migrations\n# from scratch. The latter is a flawed and unsustainable approach (the more migrations\n# you'll amass, the slower it'll run and the greater likelihood for issues).\n#\n# It's strongly recommended that you check this file into your version control system.\n\nActiveRecord::Schema.define(version: 20180619210823) do\n\n  create_table \"branches\", id: :integer, force: :cascade, options: \"ENGINE=InnoDB DEFAULT CHARSET=utf8\" do |t|\n    t.integer \"repository_id\", null: false\n    t.string \"name\", null: false\n    t.boolean \"convergence\", default: false, null: false\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n    t.index [\"repository_id\", \"convergence\"], name: \"index_branches_on_repository_id_and_convergence\"\n    t.index [\"repository_id\", \"name\"], name: \"index_branches_on_repository_id_and_name\", unique: true\n    t.index [\"repository_id\"], name: \"index_branches_on_repository_id\"\n  end\n\n  create_table \"build_artifacts\", id: :integer, force: :cascade, options: \"ENGINE=InnoDB DEFAULT CHARSET=utf8\" do |t|\n    t.integer \"build_attempt_id\"\n    t.string \"log_file\"\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n    t.index [\"build_attempt_id\"], name: \"index_build_artifacts_on_build_attempt_id\"\n  end\n\n  create_table \"build_attempts\", id: :integer, force: :cascade, options: \"ENGINE=InnoDB DEFAULT CHARSET=utf8\" do |t|\n    t.integer \"build_part_id\"\n    
t.datetime \"started_at\"\n    t.datetime \"finished_at\"\n    t.string \"builder\"\n    t.string \"state\"\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n    t.integer \"log_streamer_port\"\n    t.string \"instance_type\"\n    t.index [\"build_part_id\"], name: \"index_build_attempts_on_build_part_id\"\n    t.index [\"created_at\"], name: \"index_build_attempts_on_created_at\"\n  end\n\n  create_table \"build_parts\", id: :integer, force: :cascade, options: \"ENGINE=InnoDB DEFAULT CHARSET=utf8\" do |t|\n    t.integer \"build_id\"\n    t.string \"kind\"\n    t.text \"paths\"\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n    t.text \"options\"\n    t.string \"queue\"\n    t.integer \"retry_count\", default: 0\n    t.index [\"build_id\"], name: \"index_build_parts_on_build_id\"\n    t.index [\"paths\"], name: \"index_build_parts_on_paths\", length: { paths: 255 }\n  end\n\n  create_table \"builds\", id: :integer, force: :cascade, options: \"ENGINE=InnoDB DEFAULT CHARSET=utf8\" do |t|\n    t.string \"ref\", limit: 40, null: false\n    t.string \"state\"\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n    t.integer \"project_id\"\n    t.boolean \"merge_on_success\"\n    t.boolean \"build_failure_email_sent\", default: false, null: false\n    t.boolean \"promoted\"\n    t.string \"on_success_script_log_file\"\n    t.text \"error_details\"\n    t.boolean \"build_success_email_sent\", default: false, null: false\n    t.integer \"branch_id\"\n    t.string \"test_command\"\n    t.string \"initiated_by\"\n    t.text \"kochiku_yml_config\"\n    t.index [\"branch_id\"], name: \"index_builds_on_branch_id\"\n    t.index [\"project_id\"], name: \"index_builds_on_project_id\"\n    t.index [\"ref\", \"branch_id\"], name: \"index_builds_on_ref_and_branch_id\", unique: true\n    t.index [\"ref\", \"project_id\"], name: \"index_builds_on_ref_and_project_id\", 
unique: true\n  end\n\n  create_table \"projects\", id: :integer, force: :cascade, options: \"ENGINE=InnoDB DEFAULT CHARSET=utf8\" do |t|\n    t.string \"name\"\n    t.string \"branch\"\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n    t.integer \"repository_id\"\n    t.index [\"name\", \"branch\"], name: \"index_projects_on_name_and_branch\"\n    t.index [\"repository_id\"], name: \"index_projects_on_repository_id\"\n  end\n\n  create_table \"repositories\", id: :integer, force: :cascade, options: \"ENGINE=InnoDB DEFAULT CHARSET=utf8\" do |t|\n    t.string \"url\"\n    t.string \"test_command\"\n    t.datetime \"created_at\", null: false\n    t.datetime \"updated_at\", null: false\n    t.integer \"github_post_receive_hook_id\"\n    t.boolean \"run_ci\"\n    t.boolean \"build_pull_requests\"\n    t.string \"on_green_update\"\n    t.boolean \"send_build_failure_email\", default: true, null: false\n    t.integer \"timeout\", default: 40\n    t.string \"name\", null: false\n    t.boolean \"allows_kochiku_merges\", default: true\n    t.string \"host\", null: false\n    t.string \"namespace\"\n    t.boolean \"send_build_success_email\", default: true, null: false\n    t.boolean \"email_on_first_failure\", default: false, null: false\n    t.boolean \"send_merge_successful_email\", default: true, null: false\n    t.boolean \"enabled\", default: true, null: false\n    t.integer \"assume_lost_after\"\n    t.index [\"host\", \"namespace\", \"name\"], name: \"index_repositories_on_host_and_namespace_and_name\", unique: true\n    t.index [\"namespace\", \"name\"], name: \"index_repositories_on_namespace_and_name\", unique: true\n    t.index [\"url\"], name: \"index_repositories_on_url\"\n  end\n\nend\n"
  },
  {
    "path": "db/seeds.rb",
    "content": "# Eagerly load all of the models to avoid errors related to multiple threads\nDir[Rails.root.join(\"app/models/*.rb\")].each { |f| require f }\n\nrepo_infos = [\n  {\n    :name => 'kandan',\n    :enabled => true,\n    :location => \"git@github.com:kandanapp/kandan.git\",\n    :build_attempt_state => 'passed',\n    :types => [:spec, :cucumber, :rubocop, :lint, :unit]\n  },\n  {\n    :name => 'copycopter-server',\n    :enabled => true,\n    :build_attempt_state => 'passed',\n    :location => \"git@github.com:copycopter/copycopter-server.git\",\n    :types => [:spec]\n  },\n  {\n    :name => 'lobsters',\n    :enabled => false,\n    :build_attempt_state => 'errored',\n    :location => \"git@github.com:jcs/lobsters.git\",\n    :types => [:junit]\n  }\n]\n\n@builders = %w/\n  builder01.local builder02.local\n/\n\ndef artifact_directory\n  Rails.root.join('tmp')\nend\n\ndef write_the_sample_stdout_log_file\n  FileUtils.mkdir_p(artifact_directory)\n  name = artifact_directory.join('stdout.log')\n  File.open(name, 'w') do |file|\n    75.times { |i| file.puts \"Line #{i}\" }\n  end\n  name\nend\n\ndef sample_stdout_log_file\n  @sample_file ||= artifact_directory.join('stdout.log')\nend\n\ndef create_build_artifact(attempt)\n  BuildArtifact.create!(\n    log_file: sample_stdout_log_file.open,\n    build_attempt: attempt\n  )\nend\n\ndef create_build_part(build, kind, paths, build_attempt_state)\n  bp = BuildPart.create!(:build_instance => build,\n                         :kind => kind,\n                         :paths => paths,\n                         :queue => 'ci')\n  build_attempt_state ||= (BuildAttempt::STATES + ['passed'] * 5).sample\n  finished = if BuildAttempt::IN_PROGRESS_BUILD_STATES.include?(build_attempt_state)\n               nil\n             else\n               rand(7200).seconds.from_now\n             end\n  attempt = BuildAttempt.create!(\n    :build_part => bp,\n    :builder => @builders.sample,\n    :state => build_attempt_state,\n    
:started_at => Time.current,\n    :finished_at => finished\n  )\n  create_build_artifact(attempt)\n  bp\nend\n\ndef create_build(branch, test_types, build_attempt_state: 'passed')\n  build = Build.create!(:branch_record => branch,\n                        :ref => SecureRandom.hex(20),\n                        :initiated_by => 'test@email.com',\n                        :state => 'runnable')\n\n  Array(test_types).each do |kind|\n    paths = %w(\n      spec/controllers/admin/users_controller_spec.rb\n      spec/jobs/merchant_location_update_job_spec.rb\n      spec/models/loyalty/payer_spec.rb\n      spec/views/mailers/application_mailer/interval_sales_report.text.plain.erb_spec.rb\n    )\n\n    10.times do\n      create_build_part(build, kind, paths, build_attempt_state)\n    end\n  end\n\n  build.update_state_from_parts!\nend\n\ndef populate_builds_for(branch, repo_info)\n  thread_list = []\n\n  10.times do\n    thread_list << Thread.new do\n      ActiveRecord::Base.connection_pool.with_connection do\n        create_build(branch, repo_info[:types], build_attempt_state: repo_info[:build_attempt_state])\n      end\n    end\n  end\n\n  thread_list.each { |t| t.join }\nend\n\nwrite_the_sample_stdout_log_file\n\nrepos = {}\nrepo_infos.each do |repo_info|\n  repository = Repository.create!({:url => repo_info[:location], :test_command => \"script/ci\", :run_ci => true, :enabled => repo_info[:enabled]})\n  repos[repo_info[:name]] = repository\n  master_branch = Branch.create!(:name => 'master', :convergence => true, :repository => repository)\n  populate_builds_for(master_branch, repo_info)\n  %w(feature-branch feature-branch2 feature-branch3).each do |b|\n    developer_branch = Branch.create!(:name => b, :convergence => false, :repository => repository)\n    populate_builds_for(developer_branch, repo_info)\n  end\nend\n\n# create an extra running build for copycopter-server to show something that is in progress\ncopycopter_branch = Branch.where(repository: 
repos['copycopter-server'], name: 'master').first\ncreate_build(copycopter_branch, %w(spec), build_attempt_state: 'running')\n"
  },
  {
    "path": "lib/build_strategies/no_op_build_strategy.rb",
    "content": "class BuildStrategy\n  class << self\n    def promote_build(build)\n    end\n\n    def merge_ref(ref)\n    end\n\n    def run_success_script(build)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/build_strategies/production_build_strategy.rb",
    "content": "require 'git_blame'\n\nclass BuildStrategy\n  class << self\n    # The primary function of promote_build is to update the branches specified\n    # in on_green_update field of Repository.\n    #\n    # A feature of promote_build is that it will not cause the promotion ref to\n    # move backwards. For instance, if build 1 finishes after build 2, we don't\n    # cause the promotion ref to move backwards by overwriting promotion_ref\n    # with build 1\n    #\n    # promote_build does use a force push in order to overwrite experimental\n    # branches that may have been manually placed on the promotion ref by a\n    # developer for testing.\n    def promote_build(build)\n      GitRepo.inside_repo(build.repository) do\n        build.repository.promotion_refs.each do |promotion_ref|\n          unless GitRepo.included_in_promotion_ref?(build.ref, promotion_ref)\n            update_branch(promotion_ref, build.ref)\n          end\n        end\n      end\n    end\n\n    def run_success_script(build)\n      GitRepo.inside_copy(build.repository, build.ref) do\n        # stderr is redirected to stdout so that all output is captured in the log\n        command = Cocaine::CommandLine.new(on_success_command(build), \"2>&1\", expected_outcodes: 0..255)\n        output = command.run\n        output += \"\\nExited with status: #{command.exit_status}\"\n        script_log = FilelessIO.new(output)\n        script_log.original_filename = \"on_success_script.log\"\n        build.on_success_script_log_file = script_log\n        build.save!\n      end\n    end\n\n    def merge_ref(build)\n      # If only Stash is used this could be just inside_repo\n      GitRepo.inside_copy(build.repository, \"master\") do\n        begin\n          emails = GitBlame.emails_in_branch(build)\n          merger = build.repository.remote_server.merge_executor.new(build)\n          merge_info = merger.merge_and_push\n\n          if build.repository.send_merge_successful_email?\n            
MergeMailer.merge_successful(build, merge_info[:merge_commit], emails, merge_info[:log_output]).deliver_now\n          end\n\n          merger.delete_branch\n        rescue GitMergeExecutor::GitMergeFailedError => ex\n          MergeMailer.merge_failed(build, emails, ex.message).deliver_now\n        end\n      end\n    end\n\n    def update_branch(branch_name, ref_to_promote)\n      Cocaine::CommandLine.new(\"git push\", \"--force origin #{ref_to_promote}:refs/heads/#{branch_name}\").run\n    end\n\n    def on_success_command(build)\n      git_commit = build.ref\n      git_branch = build.branch_record.name\n      \"GIT_BRANCH=#{git_branch} GIT_COMMIT=#{git_commit} #{build.on_success_script}\"\n    end\n  end\nend\n"
  },
  {
    "path": "lib/capistrano/tasks/deploy.cap",
    "content": "# Users may choose to:\n#\n# A) edit the deploy tasks directly inside this file\n# B) create another .cap file and define your deploy:restart task inside\n#\n# Option B is recommended because it will make merging in upstream Kochiku\n# changes easy.\nnamespace :deploy do\n\n  # task :start do\n  #   on roles(:worker) do\n  #   end\n  #   on roles(:app) do\n  #   end\n  # end\n\n  # task :stop do\n  #   on roles(:worker) do\n  #   end\n  #   on roles(:app) do\n  #   end\n  # end\n\n  # task :restart do\n  #   on roles(:worker) do\n  #     # Necessary step to restart the Resque workers specific to your\n  #     # deployment\n  #   end\n  #   on roles(:app) do\n  #     # Example restart step for a Phusion Passenger deployment\n  #     execute :touch, \"#{current_release}/tmp/restart.txt\"\n  #   end\n  # end\n\n  task :overwrite_database_yml do\n    on roles(:app) do |host|\n      execute :mv, \"#{release_path}/config/database.production.yml\", \"#{release_path}/config/database.yml\"\n    end\n  end\nend\n# vi: filetype=ruby\n"
  },
  {
    "path": "lib/capistrano/tasks/kochiku.cap",
    "content": "namespace :kochiku do\n  task :setup do\n    on roles([:app, :worker]) do\n      SSHKit.config.command_map.prefix[:gem].pop    #pop off 'bundle exec'\n\n      execute :gem, \"install\", \"bundler\",  \"--conservative\", \"-v\", \"1.3\"\n      execute \"mkdir -p #{shared_path}/build-partition #{shared_path}/log_files\"\n    end\n  end\n\n  task :symlinks do\n    on roles([:app, :worker]) do\n      execute :mkdir, release_path.join('tmp')\n      execute :ln, '-nfFs', shared_path.join('build-partition'), release_path.join('tmp/build-partition')\n      execute :ln, '-nfFs', shared_path.join('log_files'), release_path.join('public/log_files')\n      execute :ln, '-nfFs', shared_path.join('secrets'), release_path.join('config/secrets')\n    end\n  end\nend\n# vi: filetype=ruby\n"
  },
  {
    "path": "lib/fileless_io.rb",
    "content": "require 'stringio'\nclass FilelessIO < StringIO\n  attr_accessor :original_filename\nend\n"
  },
  {
    "path": "lib/git_blame.rb",
    "content": "require 'cocaine'\nrequire 'git_repo'\n\nclass GitBlame\n  class << self\n    def emails_since_last_green(build)\n      lookup_git_names_and_emails(git_names_and_emails_since_last_green(build))\n    end\n\n    def emails_in_branch(build)\n      lookup_git_names_and_emails(git_names_and_emails_in_branch(build))\n    end\n\n    def last_email_in_branch(build)\n      lookup_git_names_and_emails(last_git_name_and_email_in_branch(build))\n    end\n\n    def changes_since_last_green(build)\n      output = GitRepo.inside_repo(build.repository) do\n        # TODO: Push this down into GitRepo and integration test it.\n        Cocaine::CommandLine.new(\"git log --cc --format='::!::%H|%cn <%ce>|%cd|%B::!::' '#{build.previous_successful_build.try(:ref)}...#{build.ref}'\").run\n      end\n      parse_git_changes(output)\n    end\n\n    def changes_in_branch(build)\n      output = GitRepo.inside_repo(build.repository) do\n        # TODO: Push this down into GitRepo and integration test it.\n        Cocaine::CommandLine.new(\"git log --cc --format='::!::%H|%cn <%ce>|%cd|%B::!::' 'master..#{build.branch_record.name}'\").run\n      end\n      parse_git_changes(output)\n    end\n\n    def files_changed_since_last_build(build, fetch_emails: false, sync: true)\n      output = GitRepo.inside_repo(build.repository, sync: sync) do\n        Cocaine::CommandLine.new(\"git log --cc --format='::!::%an:%ae::!::' --name-only '#{build.previous_build.try(:ref)}...#{build.ref}'\").run\n      end\n      parse_git_files_changes(output, fetch_emails: fetch_emails)\n    end\n\n    def files_changed_since_last_green(build, fetch_emails: false)\n      output = GitRepo.inside_repo(build.repository) do\n        # TODO: Push this down into GitRepo and integration test it.\n        Cocaine::CommandLine.new(\"git log --cc --format='::!::%cn:%ce::!::' --name-only '#{build.previous_successful_build.try(:ref)}...#{build.ref}'\").run\n      end\n      parse_git_files_changes(output, 
fetch_emails: fetch_emails)\n    end\n\n    def files_changed_in_branch(build, fetch_emails: false, sync: true)\n      output = GitRepo.inside_repo(build.repository, sync: sync) do\n        # TODO: Push this down into GitRepo and integration test it.\n        Cocaine::CommandLine.new(\"git log --cc --format='::!::%cn:%ce::!::' --name-only 'master..#{build.branch_record.name}'\").run\n      end\n      parse_git_files_changes(output, fetch_emails: fetch_emails)\n    end\n\n    # net_files_changed_in_branch counts only files which have a net diff in the branch. If a branch includes a commit\n    # to modify a file, and then a revert commit, that file will not be included in this list.\n    def net_files_changed_in_branch(build, sync: true)\n      # get revision of shared ancestor of master and build branch, i.e. the commit at which point this branch was created\n      common_ancestor = GitRepo.inside_repo(build.repository, sync: sync) do\n        Cocaine::CommandLine.new(\"git merge-base master #{build.branch_record.name}\").run\n      end\n\n      output = GitRepo.inside_repo(build.repository, sync: sync) do\n        Cocaine::CommandLine.new(\"git diff --name-status --find-renames --find-copies '#{common_ancestor.strip}..#{build.branch_record.name}'\").run\n      end\n      parse_git_changes_by_name_status(output)\n    end\n\n    private\n\n    def email_from_git_email(email)\n      if email =~ /^#{Settings.git_pair_email_prefix}\\+/\n        localpart, domain = email.split('@')\n        usernames = localpart.strip.split('+')\n        usernames[1..-1].map { |username| \"#{username}@#{domain}\" }\n      else\n        email\n      end\n    end\n\n    def git_names_and_emails_since_last_green(build)\n      GitRepo.inside_repo(build.repository) do\n        Cocaine::CommandLine.new(\"git log --format='%cn:%ce' '#{build.previous_successful_build.try(:ref)}...#{build.ref}'\").run.split(\"\\n\")\n      end\n    end\n\n    def git_names_and_emails_in_branch(build)\n      
GitRepo.inside_repo(build.repository) do\n        if GitRepo.branch_exist?(build.branch_record.name)\n          Cocaine::CommandLine.new(\"git log --format='%cn:%ce' 'master..#{build.branch_record.name}'\").run.split(\"\\n\")\n        else\n          []\n        end\n      end\n    end\n\n    def last_git_name_and_email_in_branch(build)\n      GitRepo.inside_repo(build.repository) do\n        Cocaine::CommandLine.new(\"git log --format='%cn:%ce' -1 '#{build.branch_record.name}'\").run.strip\n      end\n    end\n\n    def lookup_git_names_and_emails(git_names_and_emails)\n      Array(git_names_and_emails).map do |git_name_and_email|\n        _name, email = git_name_and_email.split(\":\")\n        Array(email_from_git_email(email))\n      end.flatten.compact.uniq\n    end\n\n    def parse_git_changes(output)\n      output.split(\"::!::\").each_with_object([]) do |line, git_changes|\n        commit_hash, author, commit_date, commit_message = line.chomp.split(\"|\")\n        next if commit_hash.nil? || commit_message.nil?\n        git_changes << {:hash => commit_hash, :author => author, :date => commit_date, :message => commit_message.tr(\"\\n\", \" \")}\n      end\n    end\n\n    def parse_git_files_changes(output, fetch_emails: false)\n      email_addresses = []\n      output.split(\"\\n\").each_with_object([]) do |line, file_changes|\n        next if line.empty?\n        if line.start_with?(\"::!::\")\n          email_addresses = []\n          if fetch_emails\n            line.split(\"::!::\").each do |line_part|\n              next if line_part.nil? 
|| line_part.empty?\n              _name, email = line_part.split(\":\")\n              email_addresses = email_addresses + Array(email_from_git_email(email))\n            end\n            email_addresses.compact!\n          end\n        else\n          file_changes << {:file => line, :emails => email_addresses}\n        end\n      end\n    end\n\n    def parse_git_changes_by_name_status(output)\n      output.split(\"\\n\").each_with_object([]) do |line, file_changes|\n        next if line.empty?\n\n        # Format from --name-status prints a status code, then any file names after that.\n        # For example, a file rename that has 85% similarity would be output as:\n        #   R085   path/to/original_name.java   path/to/new_name.java\n        # We can safely reject the first element (status code) in the split array.\n        files_in_line = line.split[1..-1]\n\n        files_in_line.each { |file| file_changes << {file: file, emails: []} }\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/git_merge_executor.rb",
    "content": "require 'open3'\n\nclass GitMergeExecutor\n  class GitFetchFailedError < StandardError; end\n  class GitMergeFailedError < StandardError; end\n  class GitPushFailedError < StandardError; end\n\n  def initialize(build)\n    if build.branch_record.convergence?\n      raise \"attempted to merge #{build.branch_record.name} which is a convergence branch and is ineligible for merge by Kochiku\"\n    end\n    @build = build\n  end\n\n  # Public: Merges the branch associated with a build into the master branch\n  # and pushes the result to remote git repo. If the merge is unsuccessful for\n  # any reason the merge is aborted and an exception is raised.\n  def merge_and_push\n    Rails.logger.info(\"Trying to merge branch: #{@build.branch_record.name} to master after build id: #{@build.id}\")\n\n    begin\n      git_fetch_and_reset\n\n      merge_commit_sha, merge_log = merge_to_master\n\n      push_log = push_to_remote\n    rescue GitFetchFailedError, GitPushFailedError\n      tries = (tries || 0) + 1\n      if tries < 3\n        sleep(10 * tries)\n        retry\n      else\n        raise\n      end\n    end\n\n    { merge_commit: merge_commit_sha, log_output: [merge_log, push_log].join(\"\\n\") }\n  end\n\n  def delete_branch\n    begin\n      git_fetch_and_reset\n\n      delete_log, status = Open3.capture2e(\"git push --porcelain --delete origin #{@build.branch_record.name}\")\n      unless status.success?\n        Rails.logger.warn(\"Deletion of branch #{@build.branch_record.name} failed\")\n        Rails.logger.warn(delete_log)\n      end\n    rescue GitFetchFailedError\n      Rails.logger.warn(\"Deletion of branch #{@build.branch_record.name} failed\")\n    end\n  end\n\n  private\n\n  def git_fetch_and_reset\n    checkout_log, status = Open3.capture2e(\"git fetch && git checkout master && git reset --hard origin/master\")\n    unless status.success?\n      raise_and_log(GitFetchFailedError, \"Error occurred while reseting to origin/master:\", 
checkout_log)\n    end\n  end\n\n  def merge_to_master\n    commit_message = \"Kochiku merge of branch #{@build.branch_record.name} for build id: #{@build.id} ref: #{@build.ref}\"\n    merge_log, status = Open3.capture2e(merge_env, \"git merge --no-ff -m '#{commit_message}' #{@build.ref}\")\n\n    unless status.success?\n      Open3.capture2e(\"git merge --abort\")\n      raise_and_log(GitMergeFailedError, \"Was unable to merge your branch:\", merge_log)\n    end\n\n    newest_sha, _status = Open3.capture2e(\"git rev-parse master\")\n    [newest_sha.chomp, merge_log]\n  end\n\n  def push_to_remote\n    push_log, status = Open3.capture2e(\"git push --porcelain origin master\")\n\n    unless status.success?\n      raise_and_log(GitPushFailedError, \"git push of branch #{@build.branch_record.name} failed:\", push_log)\n    end\n\n    push_log\n  end\n\n  def raise_and_log(error_class, error_info, command_output)\n    message = \"#{error_info}\\n\\n#{command_output}\"\n    Rails.logger.error(message)\n    raise(error_class, message)\n  end\n\n  def merge_env\n    author_name  = \"kochiku-merger\"\n    author_email = \"noreply+kochiku-merger@#{Settings.domain_name}\"\n    {\"GIT_AUTHOR_NAME\" => author_name, \"GIT_COMMITTER_NAME\" => author_name,\n     \"GIT_AUTHOR_EMAIL\" => author_email, \"GIT_COMMITTER_EMAIL\" => author_email}\n  end\nend\n"
  },
  {
    "path": "lib/git_repo.rb",
    "content": "require 'cocaine'\nrequire 'fileutils'\n\nclass GitRepo\n  class RefNotFoundError < StandardError; end\n\n  WORKING_DIR = Rails.root.join('tmp', 'build-partition')\n\n  class << self\n    def inside_copy(repository, sha)\n      cached_repo_path = cached_repo_for(repository)\n\n      synchronize_cache_repo(cached_repo_path)\n\n      Dir.mktmpdir(nil, WORKING_DIR) do |dir|\n        Cocaine::CommandLine.new(\"git clone\", \"--config remote.origin.pushurl=#{repository.url} #{cached_repo_path} #{dir}\").run\n\n        Dir.chdir(dir) do\n          raise RefNotFoundError, \"repo:#{repository.url}, sha:#{sha}\" unless system(\"git rev-list --quiet -n1 #{sha}\")\n\n          Cocaine::CommandLine.new(\"git checkout\", \"--quiet :commit\").run(commit: sha)\n\n          yield dir\n        end\n      end\n    end\n\n    def inside_repo(repository, sync: true)\n      cached_repo_path = cached_repo_for(repository)\n\n      Dir.chdir(cached_repo_path) do\n        synchronize_with_remote('origin') if sync\n\n        yield\n      end\n    end\n\n    def load_kochiku_yml(repository, ref)\n      inside_repo(repository) do\n        raise RefNotFoundError, \"repo:#{repository.url}, sha:#{ref}\" unless system(\"git rev-list --quiet -n1 #{ref}\")\n\n        read_repo_config(ref)\n      end\n    end\n\n    def included_in_promotion_ref?(build_ref, promotion_ref)\n      # --is-ancestor was added in git 1.8.0\n      # exit ->   1: not an ancestor\n      # exit -> 128: the commit does not exist\n      ancestor_cmd = Cocaine::CommandLine.new(\"git merge-base\", \"--is-ancestor #{build_ref} #{promotion_ref}\", :expected_outcodes => [0, 1, 128])\n      ancestor_cmd.run\n      ancestor_cmd.exit_status == 0\n    end\n\n    def branch_exist?(branch)\n      exist_cmd = Cocaine::CommandLine.new(\"git rev-parse\", \"--verify --quiet #{branch}\", expected_outcodes: [0, 1])\n      exist_cmd.run\n      exist_cmd.exit_status == 0\n    end\n\n    private\n\n    KOCHIKU_YML_LOCS = [\n      
'kochiku.yml',\n      'config/kochiku.yml',\n      'config/ci/kochiku.yml',\n    ].freeze\n\n    def read_repo_config(ref)\n      command = Cocaine::CommandLine.new(\"git show\", \":ref::file\",\n                                         { :swallow_stderr => true, :expected_outcodes => [0, 128] })\n      KOCHIKU_YML_LOCS.each do |loc|\n        file = command.run(:ref => ref, :file => loc)\n        return YAML.load(file) if command.exit_status == 0\n      end\n      nil\n    end\n\n    def cached_repo_for(repository)\n      cached_repo_path = WORKING_DIR.join(repository.namespace, \"#{repository.name}.git\")\n\n      if !cached_repo_path.directory?\n        FileUtils.mkdir_p(WORKING_DIR.join(repository.namespace))\n        clone_bare_repo(repository, cached_repo_path)\n      else\n        harmonize_remote_url(cached_repo_path, repository.url_for_fetching)\n      end\n\n      cached_repo_path\n    end\n\n    # Update the remote url for the git repository if it has changed\n    def harmonize_remote_url(cached_repo_path, expected_url)\n      Dir.chdir(cached_repo_path) do\n        remote_url = Cocaine::CommandLine.new(\"git config\", \"--get remote.origin.url\").run.chomp\n        if remote_url != expected_url\n          Rails.logger.info \"#{remote_url.inspect} does not match #{expected_url.inspect}. 
Updating it.\"\n          Cocaine::CommandLine.new(\"git remote\", \"set-url origin #{expected_url}\").run\n        end\n      end\n      nil\n    end\n\n    def synchronize_cache_repo(cached_repo_path)\n      Dir.chdir(cached_repo_path) do\n        # update the cached repo\n        synchronize_with_remote('origin')\n      end\n    end\n\n    def clone_bare_repo(repo, cached_repo_path)\n      # Note: the --config option was added in git 1.7.7\n      Cocaine::CommandLine.new(\n        \"git clone\",\n        \"--bare --quiet --config remote.origin.pushurl=#{repo.url} --config remote.origin.fetch='+refs/heads/*:refs/heads/*' --config remote.origin.tagopt='--no-tags' #{repo.url_for_fetching} #{cached_repo_path}\"\n      ).run\n    end\n\n    def synchronize_with_remote(name)\n      Cocaine::CommandLine.new(\"git fetch\", \"--quiet --prune #{name}\").run\n    rescue Cocaine::ExitStatusError => e\n      # likely caused by another 'git fetch' that is currently in progress. Wait a few seconds and try again\n      tries = (tries || 0) + 1\n      if tries < 3\n        Rails.logger.warn(e)\n        sleep(15 * tries)\n        retry\n      else\n        raise e\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/github_commit_status.rb",
    "content": "require 'github_request'\n\nclass GithubCommitStatus\n  def initialize(build, oauth_token)\n    @oauth_token = oauth_token\n    @url = \"#{build.repository.base_api_url}/statuses/#{build.ref}\"\n    @build = build\n    @build_url = Rails.application.routes.url_helpers.repository_build_url(build.repository, build)\n  end\n\n  def update_commit_status!\n    if @build.succeeded?\n      mark_as(\"success\", \"Build passed!\")\n    elsif @build.failed? || @build.aborted?\n      mark_as(\"failure\", \"Build failed\")\n    else\n      mark_as(\"pending\", \"Build is running\")\n    end\n  end\n\n  private\n\n  def mark_as(state, description)\n    GithubRequest.post(@url, {:state => state, :target_url => @build_url, :description => description}, @oauth_token)\n  end\nend\n"
  },
  {
    "path": "lib/github_post_receive_hook.rb",
    "content": "# frozen_string_literal: true\nrequire 'github_request'\n\nclass GithubPostReceiveHook\n  SUBSCRIBE_NAME = \"web\"\n\n  def initialize(repository, oauth_token)\n    @repository = repository\n    @oauth_token = oauth_token\n    @root_url = \"#{repository.base_api_url}/hooks\"\n    @hook_url = \"#{repository.base_api_url}/hooks/#{repository.github_post_receive_hook_id}\"\n    @receive_url = Rails.application.routes.url_helpers.pull_request_build_url\n    @interested_events = @repository.interested_github_events\n    @subscribe_args = {:name => \"web\", :config => {:url => @receive_url}, :events => @interested_events, :active => true}\n  end\n\n  def subscribe!\n    if @repository.github_post_receive_hook_id\n      update_repository_hook!\n    else\n      synchronize_or_create!\n    end\n  end\n\n  private\n\n  def update_repository_hook!\n    begin\n      GithubRequest.patch(@hook_url, @subscribe_args, @oauth_token)\n    rescue GithubRequest::ResponseError => e\n      if e.response.class == Net::HTTPNotFound\n        create_hook\n      else\n        raise e\n      end\n    end\n  end\n\n  def synchronize_or_create!\n    begin\n      response_body = GithubRequest.get(@root_url, @oauth_token)\n      existing_hooks = JSON.parse(response_body)\n      existing_subscription = existing_hooks.detect do |hook|\n        hook[\"active\"] && hook[\"events\"] == @interested_events && hook[\"config\"][\"url\"] == @receive_url\n      end\n      if existing_subscription\n        @repository.update_attributes(:github_post_receive_hook_id => existing_subscription[\"id\"])\n        return response_body\n      end\n    rescue GithubRequest::ResponseError\n      Rails.logger.info(\"Failed to get hooks for #{@root_url}\")\n    end\n\n    create_hook\n  end\n\n  def create_hook\n    GithubRequest.post(@root_url, @subscribe_args, @oauth_token)\n  end\nend\n"
  },
  {
    "path": "lib/github_request.rb",
    "content": "require 'uri'\nrequire 'net/http'\n\nclass GithubRequest\n  class ResponseError < RuntimeError\n    attr_accessor :response\n  end\n\n  def self.get(url, oauth_token)\n    uri = URI(url)\n    request = Net::HTTP::Get.new(uri.request_uri)\n    request[\"Authorization\"] = \"token #{oauth_token}\"\n    request[\"Accept\"] = \"application/vnd.github.v3+json\"\n    make_request(uri, request)\n  end\n\n  def self.post(url, data, oauth_token)\n    uri = URI(url)\n    request = Net::HTTP::Post.new(uri.request_uri)\n    request.body = data.to_json\n    request[\"Authorization\"] = \"token #{oauth_token}\"\n    request[\"Accept\"] = \"application/vnd.github.v3+json\"\n    request[\"Content-Type\"] = \"application/json; charset=utf-8\"\n    make_request(uri, request)\n  end\n\n  def self.patch(url, data, oauth_token)\n    uri = URI(url)\n    request = Net::HTTP::Patch.new(uri.request_uri)\n    request.body = data.to_json\n    request[\"Authorization\"] = \"token #{oauth_token}\"\n    request[\"Accept\"] = \"application/vnd.github.v3+json\"\n    request[\"Content-Type\"] = \"application/json; charset=utf-8\"\n    make_request(uri, request)\n  end\n\n  def self.make_request(uri, request_object)\n    Rails.logger.info(\"Github request: #{request_object.method}, #{uri}\")\n    body = nil\n    Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|\n      response = http.request(request_object)\n      body = response.body\n      Rails.logger.info(\"Github response: #{response.inspect}\")\n      Rails.logger.info(\"Github response body: #{body.inspect}\")\n      unless response.is_a? Net::HTTPSuccess\n        response_error = ResponseError.new(\"response: #{response.class} body: #{body}\")\n        response_error.response = response\n        raise response_error\n      end\n    end\n    body\n  end\n  private_class_method :make_request\nend\n"
  },
  {
    "path": "lib/partitioner/base.rb",
    "content": "module Partitioner\n  class Base\n    def initialize(build, kochiku_yml)\n      @build = build\n      @kochiku_yml = kochiku_yml\n    end\n\n    def partitions\n      [\n        {\n          'type' => 'test',\n          'files' => ['no-manifest'],\n          'queue' => @build.branch_record.convergence? ? 'ci' : 'developer',\n          'retry_count' => 0\n        }\n      ]\n    end\n\n    def emails_for_commits_causing_failures\n      {}\n    end\n\n    def partitioner_type\n      self.class.name.gsub(/^Partitioner::/, '')\n    end\n  end\nend\n"
  },
  {
    "path": "lib/partitioner/default.rb",
    "content": "require 'partitioner/base'\n\nmodule Partitioner\n  # This is the origional partitioner behavior, which is somewhat ruby targeted\n  class Default < Base\n    def partitions\n      GitRepo.inside_copy(@build.repository, @build.ref) do\n        # Handle old kochiku.yml\n        if @kochiku_yml.is_a?(Array)\n          @kochiku_yml.map { |subset| partitions_for(subset) }.flatten\n        else\n          build_partitions\n        end\n      end\n    end\n\n    private\n\n    def max_build_time\n      if @kochiku_yml.is_a?(Array)\n        @kochiku_yml\n      else\n        @kochiku_yml.fetch('targets')\n      end.map do |subset|\n        file_to_times_hash = load_manifest(subset['time_manifest'])\n        file_to_times_hash.values if file_to_times_hash.is_a?(Hash)\n      end.flatten.compact.max\n    end\n\n    def build_partitions\n      if @kochiku_yml['ruby']\n        @kochiku_yml['ruby'].flat_map do |ruby|\n          build_targets(ruby)\n        end\n      else\n        build_targets\n      end\n    end\n\n    def build_targets(ruby_version = nil)\n      options = {}\n      options['ruby'] = ruby_version if ruby_version\n      options['log_file_globs'] = Array(@kochiku_yml['log_file_globs']) if @kochiku_yml['log_file_globs']\n\n      @kochiku_yml['targets'].flat_map do |subset|\n        partitions_for(\n          subset.merge('options' => options.clone)\n        )\n      end\n    end\n\n    def get_file_parts_for(subset)\n      glob = subset.fetch('glob', '/dev/null')\n      manifest = subset['manifest']\n      workers = subset.fetch('workers', 1)\n\n      strategy = subset.fetch('balance', 'round_robin')\n      strategy = 'round_robin' unless Strategies.respond_to?(strategy) # override if specified strategy is invalid\n\n      files = Array(load_manifest(manifest)) | Dir[*glob]\n\n      file_to_times_hash = load_manifest(subset['time_manifest'])\n\n      balanced_partitions = if file_to_times_hash.is_a?(Hash)\n                              @max_time 
||= max_build_time\n                              time_greedy_partitions_for(file_to_times_hash, files, workers)\n                            else\n                              []\n                            end\n\n      files -= balanced_partitions.flatten\n\n      Strategies.send(strategy, files, workers) + balanced_partitions\n    end\n\n    def partitions_for(subset)\n      type = subset.fetch('type', 'test')\n      retry_count = subset['retry_count'] || 0\n      if subset['log_file_globs']\n        subset['options']['log_file_globs'] = Array(subset['log_file_globs'])\n      end\n\n      queue = @build.branch_record.convergence? ? \"ci\" : \"developer\"\n      queue_override = subset.fetch('queue_override', nil)\n      queue = \"#{queue}-#{queue_override}\" if queue_override.present?\n\n      subset_part_files = subset['target'] ? [Array(subset['target'])] : get_file_parts_for(subset)\n\n      subset_part_files.map do |part_files|\n        {'type' => type, 'files' => part_files.compact, 'queue' => queue,\n         'retry_count' => retry_count, 'options' => subset['options']}\n      end.select { |p| p['files'].present? }\n    end\n\n    # Balance tests by putting each test into the worker with the shortest expected execution time\n    # If a test that no longer exists is referenced in the file_to_times_hash, do not include it in\n    # the list of tests to be executed.  
If there are new tests not included in the file_to_times_hash,\n    # assume they will run fast.\n    def time_greedy_partitions_for(file_to_times_hash, all_files, workers)\n      # exclude tests that are not present\n      file_to_times_hash.slice!(*all_files)\n      min_test_time = file_to_times_hash.values.flatten.min || 1\n      setup_time = min_test_time / 2\n\n      files_by_worker = []\n      runtimes_by_worker = []\n\n      file_to_times_hash.to_a.sort_by { |a| a.last.max }.reverse_each do |file, times|\n        file_runtime = times.max\n        if runtimes_by_worker.length < workers\n          files_by_worker << [file]\n          runtimes_by_worker << file_runtime\n        else\n          _fastest_worker_time, fastest_worker_index = runtimes_by_worker.each_with_index.min\n          files_by_worker[fastest_worker_index] << file\n          runtimes_by_worker[fastest_worker_index] += file_runtime - setup_time\n        end\n      end\n\n      # Add any missing files\n      missing_files = all_files - file_to_times_hash.keys\n      files_by_worker = files_by_worker.zip(missing_files.in_groups(workers)).map(&:flatten).map(&:compact)\n\n      files_by_worker\n    end\n\n    def load_manifest(file_name)\n      YAML.load_file(file_name) if file_name\n    end\n\n    module Strategies\n      class << self\n        def alphabetically(files, workers)\n          files.in_groups(workers)\n        end\n\n        def isolated(files, workers)\n          files.in_groups_of(1)\n        end\n\n        def round_robin(files, workers)\n          files.in_groups_of(workers).transpose\n        end\n\n        def shuffle(files, workers)\n          files.shuffle.in_groups(workers)\n        end\n\n        def size(files, workers)\n          files.sort_by { |path| File.size(path) }.reverse.in_groups_of(workers).transpose\n        end\n\n        def size_greedy_partitioning(files, workers)\n          files = files.sort_by { |path| 0 - File.size(path) }\n          numbers = 
(0...workers).to_a\n          results = numbers.map { [] }\n          sizes = numbers.map { 0 }\n          files.each do |file|\n            dest = numbers.sort_by { |n| sizes[n] }.first\n            sizes[dest] += File.size(file)\n            results[dest] << file\n          end\n          return results\n        end\n\n        def size_average_partitioning(files, workers)\n          threshold = files.sum { |file| File.size(file) } / workers\n          results = []\n          this_bucket = []\n          this_bucket_size = 0\n\n          files.each do |file|\n            if this_bucket_size > threshold && results.size < workers\n              results << this_bucket\n              this_bucket = []\n              this_bucket_size = this_bucket_size - threshold\n            end\n\n            this_bucket << file\n            this_bucket_size += File.size(file)\n          end\n\n          results << this_bucket\n          return results\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/partitioner/dependency_map.rb",
    "content": "require 'partitioner/default'\nrequire 'git_blame'\n\nmodule Partitioner\n  # A variation on Partitioner::Default, which allows builds to run a subset of tests based on the files changed\n  # on its branch.\n  #\n  # Accepts all the same configuration options as Partitioner::Default, and optionally accepts a dependency_map for\n  # each specified test target.\n  #\n  # Sample excerpt from `kochiku.yml` with options:\n  #\n  # ```yml\n  # partitioner: dependency_map\n  #\n  # dependency_map_options:\n  #   # Branches with these names will include files that match every test_glob, regardless of files changed on branch\n  #   run_all_tests_for_branches:\n  #     - master\n  #\n  # targets:\n  #   - dependency_map:\n  #       # For each object in the dependency_map array, if its source_glob matches files changed on this branch,\n  #       # add files that match its test_glob to the files that should be used in the partitions\n  #       - source_glob: foo/**\n  #         test_glob: foo/**/*spec.rb\n  #         workers: 1  # Add this many workers if this source_glob matches files changed on this branch\n  #\n  #       - source_glob:\n  #           - bar/**\n  #           - app/bar/**\n  #         test_glob:\n  #           - bar/**/*spec.rb\n  #           - spec/bar/**/*spec.rb\n  #         workers: 5\n  #\n  #       - source_glob: *\n  #         test_glob: baz/*spec.rb\n  #\n  #     # If a target specifies a default_test_glob and none of its specified source_globs match files changed,\n  #     # add files that match its default_test_glob to the files that should be used in the partitions\n  #     default_test_glob:\n  #       - {foo,bar}/**/*spec.rb\n  #       - spec/bar/**/*spec.rb\n  #       - baz/*spec.rb\n  #\n  #     # Maximum number of workers for this partition\n  #     workers: 30\n  # ```\n  class DependencyMap < Default\n    private\n\n    KOCHIKU_YML_LOCS = %w(kochiku.yml config/kochiku.yml config/ci/kochiku.yml).freeze\n\n    # Indicates 
whether this build should run all test files, or only those test files which map to source files\n    # that have changed in this branch\n    def should_run_all_tests\n      @should_run_all_tests ||= (\n        # Run all tests if kochiku.yml is formatted the old way (as an array)\n        return true if @kochiku_yml.is_a?(Array)\n\n        # Run all tests if this branch name is included in dependency_map_options.run_all_tests_for_branches\n        branches_that_run_all_tests = @kochiku_yml\n                                      .fetch('dependency_map_options', {})\n                                      .fetch('run_all_tests_for_branches', [])\n\n        [*branches_that_run_all_tests].include?(@build.branch_record.name)\n      )\n    end\n\n    # Overrides Partitioner::Default#get_file_parts_for. Decides which test files to include in the partitions\n    # based on dependency_map option in each test target.\n    def get_file_parts_for(subset)\n      glob = subset.fetch('glob', '/dev/null')\n      manifest = subset['manifest']\n      workers = subset.fetch('workers', 1)\n\n      strategy = subset.fetch('balance', 'round_robin')\n      strategy = 'round_robin' unless Strategies.respond_to?(strategy) # override if specified strategy is invalid\n\n      dependency_map = subset['dependency_map']\n      default_test_glob = subset['default_test_glob']\n\n      if dependency_map.present?\n        if should_run_all_tests\n          test_globs_to_add = dependency_map.map { |dependency| dependency.fetch('test_glob', '') } << default_test_glob\n        else\n          test_globs_to_add = []\n          workers_for_dependency_map = 0\n\n          changed_files = GitBlame.net_files_changed_in_branch(@build).map { |file_object| file_object[:file] }\n\n          # If a source_glob matches the changed files on this branch, add its test_glob to the partition\n          dependency_map.each do |dependency|\n            source_globs = [*dependency.fetch('source_glob', '')]\n\n            
matched_files = changed_files.select { |path| source_globs.any? { |pattern| File.fnmatch(pattern, path) } }\n\n            unless matched_files.empty?\n              test_globs_to_add << dependency.fetch('test_glob', '')\n              workers_for_dependency_map += dependency.fetch('workers', 0)\n            end\n          end\n\n          # If no source_globs matched the changed files on this branch, add the default_test_glob to the partition\n          if test_globs_to_add.empty?\n            test_globs_to_add << default_test_glob\n          end\n\n          # If workers were added for the source_globs that matched, and the total is less than the maximum number\n          # of workers allotted for this target, use that amount of workers to build the partitions\n          if workers_for_dependency_map > 0\n            workers = [workers, workers_for_dependency_map].min\n          end\n        end\n\n        test_globs_to_add.flatten!\n        files = Dir[*test_globs_to_add]\n      elsif default_test_glob.present?\n        files = Dir[*default_test_glob]\n      else\n        files = Array(load_manifest(manifest)) | Dir[*glob]\n      end\n\n      return [] if files.empty?\n\n      file_to_times_hash = load_manifest(subset['time_manifest'])\n\n      balanced_partitions = if file_to_times_hash.is_a?(Hash)\n                              @max_time ||= max_build_time\n                              time_greedy_partitions_for(file_to_times_hash, files, workers)\n                            else\n                              []\n                            end\n\n      files -= balanced_partitions.flatten\n\n      Strategies.send(strategy, files, workers) + balanced_partitions\n    end\n  end\nend\n"
  },
  {
    "path": "lib/partitioner/go.rb",
    "content": "# frozen_string_literal: true\n\nrequire 'cocaine'\nrequire 'fileutils'\nrequire 'json'\nrequire 'set'\nrequire 'partitioner/base'\n\nmodule Partitioner\n  # This partitioner shards Go repos\n  # Example usage\n  ################################\n  # partitioner: go\n  # go_partitioner_settings:\n  #   ignore_paths:\n  #     - kochiku.yml\n  #   all_packages:\n  #     test:\n  #       # All others will run on 1 worker\n  #       items : 4\n  #       inventory2: 2\n  #     custom_go: 4\n  #   top_level_packages:\n  #     build: 4\n  #     static_analysis: 1\n  #   package_prefix: \"square/up\"\n  ################################\n\n  class Go < Base\n    class DependencyError < StandardError; end\n    def initialize(build, kochiku_yml)\n      @build = build\n      @options = {}\n      @settings = {}\n      if kochiku_yml\n        @settings = kochiku_yml['go_partitioner_settings'] if kochiku_yml['go_partitioner_settings']\n        @options['log_file_globs'] = Array(kochiku_yml['log_file_globs']) if kochiku_yml['log_file_globs']\n        @options['retry_count'] = kochiku_yml['retry_count'] if kochiku_yml['retry_count']\n      end\n\n      # Go package prefix (e.g., \"square/up\").\n      @package_prefix = @settings['package_prefix'] ? File.join(@settings['package_prefix'], '') : ''\n    end\n\n    def partitions\n      Rails.logger.info(\"Partition started: [#{all_packages_target_types} #{top_level_packages_target_types}] #{@build.ref}\")\n      start = Time.current\n      packages_to_build = []\n\n      files_changed_method = @build.branch_record.convergence? ? 
:files_changed_since_last_build : :files_changed_in_branch\n\n      GitBlame.public_send(files_changed_method, @build, sync: false).each do |file_and_emails|\n        file_path = file_and_emails[:file]\n        next if @settings.fetch('ignore_paths', []).detect { |dir| file_path.start_with?(dir) }\n\n        # build all for top level file changes\n        dir_path = File.dirname(file_path)\n        return add_partitions(all_packages) if dir_path == \".\"\n\n        packages_to_build += file_to_packages(file_path)\n      end\n\n      packages_to_build += failed_convergence_tests\n      add_partitions(packages_to_build.uniq)\n    ensure\n      Rails.logger.info(\"Partition finished: [#{all_packages_target_types} #{top_level_packages_target_types}] #{Time.current - start} #{@build.ref}\")\n    end\n\n    def file_to_packages(file_path)\n      dir_path = File.dirname(file_path)\n      if file_path.end_with? '.go'\n        path_affected_by_file = @package_prefix + dir_path\n        return Array(depends_on_map[path_affected_by_file])\n      # if its not a go file run all tests in top-level package\n      else\n        top_level_package = dir_path.split(\"/\").first\n        return Array(top_level_package_map[@package_prefix + top_level_package])\n      end\n    end\n\n    def failed_convergence_tests\n      # add in the packages that failed previously if its the convergence branch\n      if @build.branch_record.convergence? && @build.previous_build\n        previous_failures = @build.previous_build.build_parts.select(&:unsuccessful?).map(&:paths).flatten.uniq\n        previous_failures.map! 
{ |path| @package_prefix + path }\n      end\n      previous_failures || []\n    end\n\n    # Run for each packages.\n    def all_packages_target_types\n      @all_packages_target_types ||= @settings['all_packages'] || {test: 1}\n    end\n\n    # Run only for the top-level package\n    def top_level_packages_target_types\n      @top_level_packages_target_types ||= @settings['top_level_packages'] || {build: 1}\n    end\n\n    def all_packages\n      @all_packages ||= package_dependency_map.keys.select do |m|\n        m.start_with?(@package_prefix) && !m.start_with?(File.join(@package_prefix, 'vendor'))\n      end\n    end\n\n    def top_level_package_map\n      @top_level_package_map ||= filter_test(all_packages).group_by { |package| package.match(%r{^#{@package_prefix}+[^\\/]*})[0] }\n    end\n\n    # Group folders by their top-level package name.\n    def package_folders_map(packages)\n      package_folders_map = filter_test(packages).group_by { |package| package.match(%r{^#{@package_prefix}+[^\\/]*})[0] }\n      package_folders_map.each { |k, v| package_folders_map[k] = v.map { |vv| package_to_folder(vv) } }\n    end\n\n    def package_to_folder(package)\n      File.join('.', package.gsub(/^#{@package_prefix}/, \"\"), '')\n    end\n\n    def filter_test(packages)\n      packages.reject { |pack| pack.match(/_test$/) }\n    end\n\n    def package_dependency_map\n      return @package_dependency_map if @package_dependency_map\n\n      @package_dependency_map = {}\n      package_info_map.each do |import_path, package_info|\n        # Add itself?\n        @package_dependency_map[import_path] ||= Set.new\n        @package_dependency_map[import_path].add(import_path)\n\n        imports = []\n        imports.concat(package_info[\"Imports\"]) unless package_info[\"Imports\"].nil?\n        imports.concat(package_info[\"TestImports\"]) unless package_info[\"TestImports\"].nil?\n        imports.each do |import|\n          @package_dependency_map[import] ||= Set.new\n         
 @package_dependency_map[import].add(import_path)\n        end\n\n        xtest_imports = package_info[\"XTestImports\"]\n        next if xtest_imports.nil?\n\n        # Add itself?\n        test_import_path = import_path + '_test'\n        @package_dependency_map[test_import_path] ||= Set.new\n        @package_dependency_map[test_import_path].add(test_import_path)\n\n        xtest_imports.each do |import|\n          @package_dependency_map[import] ||= Set.new\n          @package_dependency_map[import].add(test_import_path)\n        end\n\n      end\n\n      @package_dependency_map\n    end\n\n    def depends_on_map\n      return @depends_on_map if @depends_on_map\n\n      # Create a map on transitive non-test dependency\n      # and a map on direct test dependency.\n      tmp_depends_on_map = {}\n      test_dep_map = {}\n      package_info_map.each do |import_path, package_info|\n        # Add itself?\n        tmp_depends_on_map[import_path] ||= Set.new\n        tmp_depends_on_map[import_path].add(import_path)\n\n        deps = package_info[\"Deps\"]\n        deps&.each do |dep|\n          tmp_depends_on_map[dep] ||= Set.new\n          tmp_depends_on_map[dep].add(import_path)\n        end\n\n        test_imports = package_info[\"TestImports\"]\n        test_imports&.each do |import|\n          test_dep_map[import] ||= Set.new\n          test_dep_map[import].add(import_path)\n        end\n\n        xtest_imports = package_info[\"XTestImports\"]\n        next if xtest_imports.nil?\n\n        # Add itself?\n        test_import_path = import_path + '_test'\n        tmp_depends_on_map[test_import_path] ||= Set.new\n        tmp_depends_on_map[test_import_path].add(test_import_path)\n\n        xtest_imports.each do |import|\n          test_dep_map[import] ||= Set.new\n          test_dep_map[import].add(test_import_path)\n        end\n      end\n\n      @depends_on_map = {}\n\n      tmp_depends_on_map.each do |import_path, deps|\n        @depends_on_map[import_path] = 
Set.new\n\n        deps.each do |dep|\n          @depends_on_map[import_path].add(dep)\n\n          test_deps = test_dep_map[dep]\n          next if test_deps.nil?\n          test_deps.each do |test_dep|\n            @depends_on_map[import_path].add(test_dep)\n          end\n        end\n      end\n\n      @depends_on_map\n    end\n\n    def package_info_map\n      return @package_info_map if @package_info_map\n\n      @package_info_map = {}\n\n      GitRepo.inside_copy(@build.repository, @build.ref) do |dir|\n        # Relocate all the code in src/#{@package_prefix}\n        # Apparently, go list generates bad package names if we don't do this.\n        src_dir = FileUtils.mkdir_p(File.join(dir, \"src\", @package_prefix))[0]\n        Cocaine::CommandLine.new(\"mv $(git ls-tree --name-only HEAD) #{src_dir}\").run\n\n        # Run \"go list\". Note that the output is NOT a valid single\n        # JSON value, but multiple JSON values. See https://github.com/golang/go/issues/12643.\n        begin\n          outputs = Cocaine::CommandLine.new(\"GOPATH=#{dir} go list -json ./...\").run\n        rescue Cocaine::ExitStatusError => e\n          raise DependencyError, \"error running 'go list -json ./...' 
\\n\\n #{e.message}\"\n        end\n        l = outputs[1..-3].split(\"}\\n{\")\n        l.each do |blob|\n          package_info = JSON.parse(\"{\" + blob + \"}\")\n          import_path = package_info[\"ImportPath\"]\n          @package_info_map[import_path] = package_info\n        end\n      end\n\n      @package_info_map\n    end\n\n    def add_partitions(packages)\n      @partition_list = []\n      package_map = package_folders_map(packages)\n\n      all_packages_target_types.each do |target_type, workers|\n        if workers.is_a?(Hash)\n          package_map.each do |package, folders|\n            worker_number = workers[package.gsub(/^#{@package_prefix}/, \"\")]\n            add_with_split(folders, target_type, worker_number)\n          end\n        elsif workers.is_a?(Integer)\n          add_with_split(package_map.map { |_, v| v }.flatten.uniq, target_type, workers)\n        end\n      end\n\n      top_level_packages_target_types.each do |target_type, workers|\n        if workers.is_a?(Hash)\n          package_map.each do |package, folders|\n            worker_number = workers[package.gsub(/^#{@package_prefix}/, \"\")]\n            add_with_split(folders, target_type, worker_number)\n          end\n        elsif workers.is_a?(Integer)\n          add_with_split(package_map.map { |k, _| package_to_folder(k) }.uniq, target_type, workers)\n        end\n      end\n\n      @partition_list\n    end\n\n    def add_with_split(package_list, target_type, workers)\n      return if package_list.size.zero?\n      if workers\n        split_size = (package_list.size / workers.to_f).ceil\n        Array(package_list).each_slice(split_size).to_a.each do |chunk|\n          @partition_list << partition_info(chunk, target_type)\n        end\n      else\n        @partition_list << partition_info(package_list, target_type)\n      end\n    end\n\n    def partition_info(packages, type)\n      queue = @build.branch_record.convergence? ? 
'ci' : 'developer'\n      queue_override = @settings.fetch('queue_overrides', []).detect do |override|\n        override['queue'] if override['paths']&.detect { |path| packages.include? path }\n      end\n      queue = \"#{queue}-#{queue_override['queue']}\" if queue_override.present?\n      {\n        'type' => type,\n        'files' => packages&.sort!,\n        'queue' => queue,\n        'retry_count' => @options.fetch('retry_count', 0),\n        'options' => @options\n      }\n    end\n  end\nend\n"
  },
  {
    "path": "lib/partitioner/maven.rb",
    "content": "# frozen_string_literal: true\nrequire 'nokogiri'\nrequire 'set'\nrequire 'partitioner/base'\nrequire 'partitioner/topological_sorter'\n\nmodule Partitioner\n  # This partitioner uses knowledge of Maven to shard large java repos\n  class Maven < Base\n    POM_XML = 'pom.xml'\n\n    def initialize(build, kochiku_yml)\n      @build = build\n      @options = {}\n      if kochiku_yml\n        @settings = kochiku_yml['maven_settings'] if kochiku_yml['maven_settings']\n        @options['log_file_globs'] = Array(kochiku_yml['log_file_globs']) if kochiku_yml['log_file_globs']\n        @options['retry_count'] = kochiku_yml['retry_count'] if kochiku_yml['retry_count']\n      end\n      @settings ||= {}\n    end\n\n    def partitions\n      Rails.logger.info(\"Partition started: [maven] #{@build.ref}\")\n      start = Time.current\n      modules_to_build = Set.new\n\n      GitRepo.inside_copy(@build.repository, @build.ref) do\n        @settings.fetch('always_build', []).each do |maven_module|\n          modules_to_build.add(maven_module)\n        end\n\n        files_changed_method = @build.branch_record.convergence? ? :files_changed_since_last_build : :files_changed_in_branch\n        GitBlame.public_send(files_changed_method, @build, sync: false).each do |file_and_emails|\n          next if @settings.fetch('ignore_paths', []).detect { |dir| file_and_emails[:file].start_with?(dir) }\n\n          module_affected_by_file = file_to_module(file_and_emails[:file])\n\n          if module_affected_by_file.nil? ||\n             @settings.fetch('build_everything', []).detect { |dir| file_and_emails[:file].start_with?(dir) }\n\n            return add_options(all_partitions)\n          else\n            modules_to_build.merge(depends_on_map[module_affected_by_file] || Set.new)\n          end\n        end\n\n        if @build.branch_record.convergence? 
&& @build.previous_build\n          modules_to_build.merge(@build.previous_build.build_parts.select(&:unsuccessful?).map(&:paths).flatten.uniq)\n        end\n\n        add_options(group_modules(sort_modules(modules_to_build)))\n      end\n    ensure\n      # TODO: log this information to event stream\n      Rails.logger.info(\"Partition finished: [maven] #{Time.current - start} #{@build.ref}\")\n    end\n\n    def emails_for_commits_causing_failures\n      return {} unless @build.branch_record.convergence?\n\n      failed_modules = @build.build_parts.failed_or_errored.each_with_object(Set.new) do |build_part, failed_set|\n        build_part.paths.each { |path| failed_set.add(path) }\n      end\n\n      email_and_files = Hash.new { |hash, key| hash[key] = [] }\n\n      GitRepo.inside_copy(@build.repository, @build.ref) do\n        GitBlame.files_changed_since_last_green(@build, fetch_emails: true).each do |file_and_emails|\n          file = file_and_emails[:file]\n          emails = file_and_emails[:emails]\n\n          module_affected_by_file = file_to_module(file_and_emails[:file])\n\n          if module_affected_by_file.nil? || @settings.fetch('build_everything', []).detect { |dir| file_and_emails[:file].start_with?(dir) }\n            emails.each { |email| email_and_files[email] << file }\n          elsif (set = depends_on_map[module_affected_by_file]) && !set.intersection(failed_modules).empty?\n            emails.each { |email| email_and_files[email] << file }\n          end\n        end\n      end\n\n      email_and_files.each_key { |email| email_and_files[email].sort!.uniq! 
}\n\n      email_and_files\n    end\n\n    # Everything below this line should be private\n\n    def maven_modules\n      return @maven_modules if @maven_modules\n      top_level_pom = Nokogiri::XML(File.read(POM_XML))\n      @maven_modules = top_level_pom.css('project>modules>module').map { |mvn_module| mvn_module.text }\n    end\n\n    def all_partitions\n      group_modules(sort_modules(maven_modules))\n    end\n\n    def pom_for(mvn_module)\n      Nokogiri::XML(File.read(\"#{mvn_module}/pom.xml\"))\n    end\n\n    def add_options(group_modules)\n      # create multiple entries for builds specifying multiple workers, assigning\n      # distinct test chunks to each\n      group_modules.flat_map do |group|\n        multiple_workers_list = @settings.fetch('multiple_workers', {})\n\n        multiple_workers_module = multiple_workers_list.keys.detect do |path|\n          group['files'].include? path\n        end\n\n        need_multiple_workers = multiple_workers_module.present?\n\n        if need_multiple_workers\n          total_workers = multiple_workers_list[multiple_workers_module]\n\n          (1..total_workers).map { |worker_chunk|\n            new_group = group.clone\n            new_options = @options.clone\n            new_options['total_workers'] = total_workers\n            new_options['worker_chunk'] = worker_chunk\n\n            new_group['options'] = new_options\n            new_group\n          }\n        else\n          group['options'] = @options\n          group\n        end\n      end\n    end\n\n    def group_modules(mvn_modules)\n      expanding_dirs = @settings.fetch('expand_directories', [])\n      mvn_modules.group_by do |m|\n        split_dirs = m.split(\"/\")\n        if expanding_dirs.include? 
split_dirs.first\n          \"#{split_dirs[0]}/#{split_dirs[1]}\"\n        else\n          split_dirs.first\n        end\n      end.values.map { |modules| partition_info(modules) }\n    end\n\n    def sort_modules(mvn_modules)\n      sorted_modules = Partitioner::TopologicalSorter.new(module_dependency_map).tsort\n      sorted_modules.delete_if { |mvn_module| !mvn_modules.include?(mvn_module) }\n    end\n\n    def partition_info(mvn_modules)\n      queue = @build.branch_record.convergence? ? 'ci' : 'developer'\n      queue_override = @settings.fetch('queue_overrides', []).detect do |override|\n        override['queue'] if override['paths'].detect { |path| mvn_modules.include? path }\n      end\n      queue = \"#{queue}-#{queue_override['queue']}\" if queue_override.present?\n      {\n        'type' => 'maven',\n        'files' => mvn_modules.sort!,\n        'queue' => queue,\n        'retry_count' => @options.fetch('retry_count', 0)\n      }\n    end\n\n    def depends_on_map\n      return @depends_on_map if @depends_on_map\n\n      module_depends_on_map = {}\n      transitive_dependency_map.each do |mvn_module, dep_set|\n        module_depends_on_map[mvn_module] ||= Set.new\n        module_depends_on_map[mvn_module].add(mvn_module)\n        dep_set.each do |dep|\n          module_depends_on_map[dep] ||= Set.new\n          module_depends_on_map[dep].add(dep)\n          module_depends_on_map[dep].add(mvn_module)\n        end\n      end\n\n      @depends_on_map = module_depends_on_map\n    end\n\n    def module_dependency_map\n      return @module_dependency_map if @module_dependency_map\n\n      group_artifact_map = {}\n\n      maven_modules.each do |mvn_module|\n        module_pom = pom_for(mvn_module)\n        group_id = module_pom.css('project>groupId').first\n        artifact_id = module_pom.css('project>artifactId').first\n        next unless group_id && artifact_id\n        group_id = group_id.text\n        artifact_id = artifact_id.text\n\n        
group_artifact_map[\"#{group_id}:#{artifact_id}\"] = mvn_module.to_s\n      end\n\n      @module_dependency_map = {}\n\n      maven_modules.each do |mvn_module|\n        module_pom = pom_for(mvn_module)\n        @module_dependency_map[mvn_module] ||= Set.new\n\n        module_pom.css('project>dependencies>dependency').each do |dep|\n          group_id = dep.css('groupId').first\n          artifact_id = dep.css('artifactId').first\n\n          raise \"dependency in #{mvn_module}/pom.xml is missing an artifactId or groupId\" unless group_id && artifact_id\n\n          if (mod = group_artifact_map[\"#{group_id.text}:#{artifact_id.text}\"])\n            module_dependency_map[mvn_module].add(mod)\n          end\n        end\n      end\n\n      @module_dependency_map\n    end\n\n    def transitive_dependency_map\n      @transitive_dependency_map ||= begin\n        module_dependency_map.each_with_object({}) do |(mvn_module, _), dep_map|\n          dep_map[mvn_module] = transitive_dependencies(mvn_module, module_dependency_map)\n        end\n      end\n    end\n\n    def transitive_dependencies(mvn_module, dependency_map)\n      result_set = Set.new\n      to_process = [mvn_module]\n\n      while (dep_module = to_process.shift)\n        deps = dependency_map[dep_module].to_a\n        to_process += (deps - result_set.to_a)\n        result_set << dep_module\n      end\n\n      result_set\n    end\n\n    def file_to_module(file_path)\n      dir_path = file_path\n      while (dir_path = File.dirname(dir_path)) != \".\"\n        return dir_path if File.exist?(\"#{dir_path}/pom.xml\")\n      end\n      nil\n    end\n  end\nend\n"
  },
  {
    "path": "lib/partitioner/topological_sorter.rb",
    "content": "require 'tsort'\n\nmodule Partitioner\n  class TopologicalSorter\n    include TSort\n\n    def initialize(dependency_map)\n      @dependency_map = dependency_map\n    end\n\n    def tsort_each_node(&block)\n      @dependency_map.each_key(&block)\n    end\n\n    def tsort_each_child(project, &block)\n      @dependency_map.fetch(project).each(&block)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/partitioner.rb",
    "content": "require 'partitioner/base'\nrequire 'partitioner/maven'\nrequire 'partitioner/default'\nrequire 'partitioner/dependency_map'\n\nmodule Partitioner\n  def self.for_build(build)\n    kochiku_yml = build.kochiku_yml\n    if kochiku_yml\n      start = Time.current\n      res = case kochiku_yml['partitioner']\n            when 'maven'\n              Partitioner::Maven.new(build, kochiku_yml)\n            when 'dependency_map'\n              Partitioner::DependencyMap.new(build, kochiku_yml)\n            when 'go'\n              Partitioner::Go.new(build, kochiku_yml)\n            else\n              # Default behavior\n              Partitioner::Default.new(build, kochiku_yml)\n            end\n      finish = Time.current\n      diff = finish - start\n      Rails.logger.info(\"Partition finished: [#{kochiku_yml['partitioner'] || 'DEFAULT'}] #{diff} #{build.ref}\")\n      res\n    else\n      # This should probably raise\n      Partitioner::Base.new(build, kochiku_yml)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/remote_server/github.rb",
    "content": "require 'github_commit_status'\nrequire 'github_post_receive_hook'\nrequire 'github_request'\nrequire 'git_merge_executor'\n\nmodule RemoteServer\n  # All integration with Github must go via this class.\n  class Github\n    URL_PARSERS = [\n      %r{\\Agit@(?<host>[^:]*):(?<username>[^\\/]*)/(?<name>[-.\\w]+?)(\\.git)?\\z},        # git@\n      %r{\\Agit://(?<host>[^\\/]*)/(?<username>[^\\/]*)/(?<name>[-.\\w]+)\\.git\\z},         # git://  (GHE only)\n      %r{\\Ahttps?://(?<host>[^\\/]*)/(?<username>[^\\/]*)/(?<name>[-.\\w]+?)(\\.git)?\\z},  # https://\n    ].freeze\n\n    def initialize(url, server)\n      @url = url\n      @settings = server\n      attributes # force url parsing\n    end\n\n    def attributes\n      @attributes ||= begin\n        parser = URL_PARSERS.detect { |regexp| @url =~ regexp }\n        raise UnknownUrlFormat, \"Do not recognize #{@url} as a github URL.\" unless parser\n\n        match = @url.match(parser)\n\n        {\n          host: match[:host],\n          repository_namespace: match[:username],\n          repository_name: match[:name],\n          possible_hosts: [@settings.host, *@settings.aliases].compact,\n        }.freeze\n      end\n    end\n\n    # Class to use for merge methods\n    def merge_executor\n      GitMergeExecutor\n    end\n\n    # Public: Returns a url for the remote repo in the format Kochiku prefers\n    # for Github, which is the SSH format.\n    def canonical_repository_url\n      \"git@#{@settings.host}:#{attributes[:repository_namespace]}/#{attributes[:repository_name]}.git\"\n    end\n\n    def url_for_compare(first_commit_hash, second_commit_hash)\n      \"#{base_html_url}/compare/#{first_commit_hash}...#{second_commit_hash}#files_bucket\"\n    end\n\n    # Where to fetch from: git mirror if defined,\n    # otherwise the canonical url\n    def url_for_fetching\n      if @settings.mirror.present?\n        canonical_repository_url.gsub(%r{(git@|https://).*?(:|/)}, @settings.mirror)\n      
else\n        canonical_repository_url\n      end\n    end\n\n    def sha_for_branch(branch)\n      response_body = GithubRequest.get(\"#{base_api_url}/git/refs/heads/#{branch}\", @settings.oauth_token)\n      branch_info = JSON.parse(response_body)\n      sha = nil\n      if branch_info['object'] && branch_info['object']['sha'].present?\n        sha = branch_info['object']['sha']\n      end\n      sha\n    rescue GithubRequest::ResponseError\n      raise RefDoesNotExist, \"Could not locate ref #{branch} on remote git server\"\n    end\n\n    def update_commit_status!(build)\n      GithubCommitStatus.new(build, @settings.oauth_token).update_commit_status!\n    end\n\n    def install_post_receive_hook!(repo)\n      GithubPostReceiveHook.new(repo, @settings.oauth_token).subscribe!\n    end\n\n    def base_api_url\n      if @url =~ /github\\.com/\n        \"https://api.#{attributes[:host]}/repos/#{attributes[:repository_namespace]}/#{attributes[:repository_name]}\"\n      else # github enterprise\n        \"https://#{attributes[:host]}/api/v3/repos/#{attributes[:repository_namespace]}/#{attributes[:repository_name]}\"\n      end\n    end\n\n    def href_for_commit(sha)\n      \"#{base_html_url}/commit/#{sha}\"\n    end\n\n    def base_html_url\n      \"https://#{attributes[:host]}/#{attributes[:repository_namespace]}/#{attributes[:repository_name]}\"\n    end\n\n    def get_branch_url(branch_name)\n      \"#{base_html_url}/tree/#{branch_name}\"\n    end\n\n    def open_pull_request_url(branch_name)\n      \"#{base_html_url}/pull/new/master...#{branch_name}\"\n    end\n  end\nend\n"
  },
  {
    "path": "lib/remote_server/stash.rb",
    "content": "require 'cgi'\nrequire 'stash_merge_executor'\n\nmodule RemoteServer\n  class StashAPIError < StandardError; end\n\n  # All integration with Stash must go via this class.\n  class Stash\n    attr_reader :stash_request\n\n    URL_PARSERS = [\n      %r{\\Agit@(?<host>[^:]*):(?<username>[^\\/]*)/(?<name>[-.\\w]+)\\.git\\z},\n      %r{\\Assh://git@(?<host>[^\\/]*?)(?<port>:\\d+)?/(?<username>[^\\/]*)/(?<name>[-.\\w]+)\\.git\\z},\n      %r{\\Ahttps://(?<host>[^@\\/]+)/scm/(?<username>[^\\/]*)/(?<name>[-.\\w]+)\\.git\\z},\n      %r{\\Ahttps://(?<host>[^@\\/]+)/projects/(?<username>[^\\/]*)/repos/(?<name>[-.\\w]+)/browse\\z}\n    ].freeze\n\n    def initialize(url, server)\n      @url = url\n      @settings = server\n      attributes # force url parsing\n      @stash_request = StashRequest.new(@settings)\n    end\n\n    def attributes\n      @attributes ||= begin\n        parser = URL_PARSERS.detect { |regexp| @url =~ regexp }\n        raise UnknownUrlFormat, \"Do not recognize #{@url} as a Stash url.\" unless parser\n\n        match = @url.match(parser)\n\n        attributes = {\n          host: match[:host],\n          repository_namespace: match[:username],\n          repository_name: match[:name],\n          possible_hosts: [@settings.host, *@settings.aliases].compact,\n        }\n        if match.names.include?('port') && match['port'].present?\n          attributes[:port] = match[:port].delete(':')\n        end\n        attributes.freeze\n      end\n    end\n\n    # Class to use for merge methods\n    def merge_executor\n      StashMergeExecutor\n    end\n\n    # Public: Returns a url for the remote repo in the format Kochiku prefers\n    # for Stash, which is the HTTPS format.\n    def canonical_repository_url\n      \"https://#{@settings.host}/scm/#{attributes[:repository_namespace]}/#{attributes[:repository_name]}.git\"\n    end\n\n    # Currently, stash does not support comparison between two arbitrary hashes-- it only supports comparison\n    # 
between heads of branches.\n    # For now, we return the comparison of HEAD at refs/heads/master and the branch for the green_builds, if any\n    def url_for_compare(first_commit_branch, second_commit_branch)\n      if second_commit_branch.blank?\n        \"#{base_html_url}/compare/commits?targetBranch=refs%2Fheads%2Fmaster\"\n      else\n        \"#{base_html_url}/compare/commits?targetBranch=#{second_commit_branch}&sourceBranch=refs%2Fheads%2Fmaster\"\n      end\n    end\n\n    # Where to fetch from: git mirror if defined,\n    # otherwise the canonical url\n    def url_for_fetching\n      if @settings.mirror.present?\n        canonical_repository_url.gsub(%r{(git@|https://).*?(:|/)}, @settings.mirror)\n      else\n        canonical_repository_url\n      end\n    end\n\n    def sha_for_branch(branch)\n      return branch if branch =~ /\\A[0-9a-f]{40}\\Z/\n\n      response_body = @stash_request.get(base_api_url +\n        \"/commits?until=#{CGI.escape(branch)}&limit=1\")\n      response = JSON.parse(response_body)\n      branch_data = response[\"values\"][0] || {}\n      branch_data['id']\n    rescue => e\n      case e.message.split(\" \")[0]\n      when \"Net::HTTPNotFound\", \"Net::HTTPBadRequest\"\n        raise RefDoesNotExist, \"Could not locate ref #{branch} on remote git server\"\n      else\n        raise e\n      end\n    end\n\n    def update_commit_status!(build)\n      build_url = Rails.application.routes.url_helpers.repository_build_url(build.repository.to_param, build)\n\n      @stash_request.post \"https://#{@settings.host}/rest/build-status/1.0/commits/#{build.ref}\", {\n        state:       stash_status_for(build),\n        key:         'kochiku',\n        name:        \"kochiku-#{build.id}\",\n        url:         build_url,\n        description: \"\"\n      }\n    end\n\n    def install_post_receive_hook!(repo)\n      # Unimplemented\n    end\n\n    def base_api_url\n      
\"https://#{@settings.host}/rest/api/1.0/projects/#{attributes[:repository_namespace]}/repos/#{attributes[:repository_name]}\"\n    end\n\n    def base_html_url\n      \"https://#{@settings.host}/projects/#{attributes[:repository_namespace].upcase}/repos/#{attributes[:repository_name]}\"\n    end\n\n    def href_for_commit(sha)\n      \"#{base_html_url}/commits/#{sha}\"\n    end\n\n    def get_branch_url(branch_name)\n      \"#{base_html_url}?at=refs/heads/#{branch_name}\"\n    end\n\n    def open_pull_request_url(branch_name)\n      \"#{base_html_url}/compare/commits?sourceBranch=refs/heads/#{branch_name}\"\n    end\n\n    # uses the stash REST api to merge a pull request\n    # raises StashAPIError if an error occurs\n    # otherwise returns true or false depending on whether merge succeeds\n    #\n    # TODO pass in expected SHA for head to branch to prevent merging a branch\n    # that is in an unexpected state\n    def merge(branch)\n      @pr_ids ||= get_pr_id_and_version(branch)\n      pr_id, pr_version = @pr_ids\n      success = can_merge?(pr_id) && perform_merge(pr_id, pr_version)\n\n      if success\n        Rails.logger.info(\"Request to stash to merge PR #{pr_id} for branch #{branch} succeeded.\")\n      else\n        Rails.logger.warn(\"Request to stash to merge PR #{pr_id} for branch #{branch} failed.\")\n      end\n\n      success\n    end\n\n    def head_commit(branch)\n      @pr_ids ||= get_pr_id_and_version(branch)\n      pr_id, _pr_version = @pr_ids\n      response = @stash_request.get(\"#{base_api_url}/pull-requests/#{pr_id}/commits\")\n      pr_commits = JSON.parse(response)\n      raise StashAPIError, pr_commits[\"errors\"] if pr_commits[\"errors\"].present?\n\n      pr_commits[\"values\"].first[\"id\"]\n    end\n\n    # uses the stash REST api to delete a branch\n    # raises StashAPIError if error occurs, else return true\n    def delete_branch(branch, dryRun = false)\n      url = 
\"https://#{attributes[:host]}/rest/branch-utils/1.0/projects/#{attributes[:repository_namespace]}/repos/#{attributes[:repository_name]}/branches\"\n\n      delete_params = {\n        \"name\" => branch,\n        \"dryRun\" => dryRun\n      }\n\n      response = @stash_request.delete(url, delete_params)\n\n      if response\n        jsonbody = JSON.parse(response)\n        raise StashAPIError, jsonbody[\"errors\"].to_s if jsonbody[\"errors\"]\n      end\n\n      true\n    end\n\n    # return PR id, version number if a single PR exists for corresponding branch\n    # else raise StashAPIError\n    def get_pr_id_and_version(branch)\n      url = \"#{base_api_url}/pull-requests?direction=outgoing&at=refs/heads/#{branch}&state=open&limit=25\"\n      response = @stash_request.get(url)\n      jsonbody = JSON.parse(response)\n      raise StashAPIError if jsonbody[\"errors\"].present? || jsonbody[\"size\"] != 1\n\n      return jsonbody[\"values\"][0][\"id\"], jsonbody[\"values\"][0][\"version\"]\n    end\n\n    private\n\n    # use stash REST api to query if a merge is possible\n    # raise StashAPIError if an error in API response, else return true\n    def can_merge?(pr_id)\n      url = \"#{base_api_url}/pull-requests/#{pr_id}/merge\"\n      response = @stash_request.get(url)\n\n      jsonbody = JSON.parse(response)\n\n      raise StashAPIError if jsonbody[\"errors\"]\n\n      if jsonbody[\"canMerge\"] == false\n        Rails.logger.info(\"Could not merge PR #{pr_id}:\")\n        Rails.logger.info(\"conflicted: #{jsonbody[\"conflicted\"]}\")\n        Rails.logger.info(\"vetoes: #{jsonbody[\"vetoes\"]}\")\n        return false\n      end\n\n      true\n    end\n\n    # use stash REST api to merge PR\n    # raise StashAPIError if an error in API response, else return true\n    def perform_merge(pr_id, pr_version)\n      url = \"#{base_api_url}/pull-requests/#{pr_id}/merge?version=#{pr_version}\"\n      response = @stash_request.post(url, nil)\n      jsonbody = 
JSON.parse(response)\n\n      raise StashAPIError if jsonbody[\"errors\"]\n      true\n    end\n\n    def stash_status_for(build)\n      if build.succeeded?\n        'SUCCESSFUL'\n      elsif build.failed? || build.aborted?\n        'FAILED'\n      else\n        'INPROGRESS'\n      end\n    end\n\n    class StashRequest\n      def initialize(settings)\n        @settings = settings\n      end\n\n      # TODO: Configure OAuth\n\n      def setup_auth!(req)\n        req.basic_auth(@settings.stash_username, @settings.stash_password)\n      end\n\n      def get(url)\n        Rails.logger.info(\"Stash GET: #{url}\")\n        get = Net::HTTP::Get.new(url)\n        setup_auth! get\n        make_request(get, URI(url))\n      end\n\n      def post(url, body = {})\n        Rails.logger.info(\"Stash POST: #{url}, #{body}\")\n        post = Net::HTTP::Post.new(url, {'Content-Type' => 'application/json'})\n        setup_auth! post\n        post.body = body.to_json\n        make_request(post, URI(url))\n      end\n\n      def delete(url, body)\n        Rails.logger.info(\"Stash DELETE: #{url}, #{body}\")\n        delete_request = Net::HTTP::Delete.new(url, {'Content-Type' => 'application/json'})\n        setup_auth! 
delete_request\n        body ||= {}\n        delete_request.body = body.to_json\n        make_request(delete_request, URI(url))\n      end\n\n      def make_request(method, url, args = [])\n        uri = URI(url)\n        body = nil\n        Net::HTTP.start(uri.host, uri.port, :use_ssl => true) do |http|\n          response = http.request(method, *args)\n          body = response.body\n          Rails.logger.info(\"Stash response: #{response.inspect}\")\n          Rails.logger.info(\"Stash response body: #{body.inspect}\")\n          unless response.is_a?(Net::HTTPSuccess)\n            if response.is_a?(Net::HTTPUnauthorized)\n              raise RemoteServer::AccessDenied.new(url, method, body)\n            else\n              raise \"#{response.class} body: #{body}\"\n            end\n          end\n        end\n        body\n      end\n    end\n\n  end\nend\n"
  },
  {
    "path": "lib/remote_server.rb",
    "content": "require 'remote_server/github'\nrequire 'remote_server/stash'\n\nmodule RemoteServer\n  UnknownGitServer = Class.new(RuntimeError)\n  UnknownUrlFormat = Class.new(RuntimeError)\n  RefDoesNotExist = Class.new(RuntimeError)\n  class AccessDenied < StandardError\n    def initialize(url, action, original_message = nil)\n      @url = url\n      @action = action\n      @original_message = original_message\n    end\n\n    def to_s\n      \"Authorization failure when attempting to call #{@url} via #{@action}\"\n    end\n  end\n\n  def self.for_url(url)\n    server = Settings.git_server(url)\n\n    raise UnknownGitServer, url unless server\n\n    case server.type\n    when 'stash'\n      RemoteServer::Stash.new(url, server)\n    when 'github'\n      RemoteServer::Github.new(url, server)\n    else\n      raise UnknownGitServer, \"No implementation for server type #{type}\"\n    end\n  end\n\n  def self.parseable_url?(url)\n    (RemoteServer::Stash::URL_PARSERS + RemoteServer::Github::URL_PARSERS).any? do |format|\n      url =~ format\n    end\n  end\n\n  def self.valid_git_host?(url)\n    !!Settings.git_server(url)\n  end\nend\n"
  },
  {
    "path": "lib/server_settings.rb",
    "content": "class ServerSettings\n\n  attr_reader :type, :oauth_token, :stash_username, :stash_password, :mirror, :host, :aliases\n\n  def initialize(raw_settings, host)\n    @host = host\n    if raw_settings\n      @type = raw_settings[:type]\n      @mirror = raw_settings[:mirror]\n      @aliases = raw_settings[:aliases]\n\n      # specific to Github\n      if raw_settings[:oauth_token_file]\n        @oauth_token = File.read(raw_settings[:oauth_token_file]).chomp\n      end\n\n      # specific to Stash\n      @stash_username = raw_settings[:username]\n      if raw_settings[:password_file]\n        @stash_password = File.read(raw_settings[:password_file]).chomp\n      end\n    else\n      @type = nil\n      @mirror = nil\n      @aliases = nil\n      @oauth_token = nil\n      @stash_username = nil\n      @stash_password = nil\n    end\n  end\nend\n"
  },
  {
    "path": "lib/settings_accessor.rb",
    "content": "require 'yaml'\nrequire 'active_support/core_ext/hash/indifferent_access'\nrequire 'server_settings'\n\nclass SettingsAccessor\n  def initialize(yaml)\n    @hash = YAML.load(yaml).with_indifferent_access\n  end\n\n  def sender_email_address\n    @hash[:sender_email_address]\n  end\n\n  def kochiku_notifications_email_address\n    @hash[:kochiku_notifications_email_address]\n  end\n\n  def domain_name\n    @hash[:domain_name]\n  end\n\n  def kochiku_protocol\n    @hash[:use_https] ? \"https\" : \"http\"\n  end\n\n  def kochiku_host\n    @hash[:kochiku_host]\n  end\n\n  def kochiku_host_with_protocol\n    \"#{kochiku_protocol}://#{kochiku_host}\"\n  end\n\n  def git_servers\n    @git_servers ||= begin\n      raw_servers = @hash[:git_servers]\n      if raw_servers\n        raw_servers.each_with_object({}) do |(host, settings_for_server), result|\n          result[host] = ServerSettings.new(settings_for_server, host)\n        end\n      else\n        {}\n      end\n    end\n  end\n\n  def git_server(url)\n    git_servers.values.detect do |server|\n      url.include?(server.host) ||\n        (server.aliases && server.aliases.detect { |a| url.include?(a) })\n    end\n  end\n\n  def smtp_server\n    @hash[:smtp_server]\n  end\n\n  def redis_host\n    @hash[:redis_host]\n  end\n\n  def redis_port\n    @hash.fetch(:redis_port, 6379)\n  end\n\n  def git_pair_email_prefix\n    @hash[:git_pair_email_prefix]\n  end\nend\n"
  },
  {
    "path": "lib/stash_merge_executor.rb",
    "content": "require 'open3'\nrequire 'git_merge_executor'\n\nclass StashMergeExecutor < GitMergeExecutor\n\n  # Merges the branch associated with a build using the Stash REST api.\n  def merge_and_push\n    remote_server = @build.repository.remote_server\n    Rails.logger.info(\"Trying to merge branch #{@build.branch_record.name} after build id #{@build.id} using Stash REST api\")\n    branch_name = @build.branch_record.name\n    head_commit = remote_server.head_commit(branch_name)\n    merge_success = remote_server.merge(branch_name)\n    unless merge_success\n      Rails.logger.info(\"Merge of #{@build.branch_record.name} failed.\")\n      raise GitMergeFailedError\n    end\n    { merge_commit: head_commit, log_output: \"Successfully merged #{@build.branch_record.name}\" }\n  end\n\n  # Delete branch associated with a build using Stash REST api.\n  def delete_branch\n    remote_server = @build.repository.remote_server\n    begin\n      Rails.logger.info(\"Trying to delete branch using Stash REST api\")\n      remote_server.delete_branch(@build.branch_record.name)\n    rescue RemoteServer::StashAPIError => e\n      Rails.logger.warn(\"Deletion of branch #{@build.branch_record.name} failed\")\n      Rails.logger.warn(e.message)\n    end\n  end\nend\n"
  },
  {
    "path": "lib/tasks/.gitkeep",
    "content": ""
  },
  {
    "path": "lib/tasks/kochiku.rake",
    "content": "namespace :kochiku do\n  desc \"Generates time_manifests for a collection of builds; invoke with `rake kochiku:generate_time_manifests['1 2 3 4']`\"\n  task :generate_time_manifests, [:ids] => [:environment] do |_, args|\n    build_ids = args.ids.split\n\n    build_ids.flat_map do |build_id|\n      Build.includes(build_parts: :build_attempts).find(build_id).build_parts\n    end.group_by(&:kind).map do |kind, parts|\n      if parts.map(&:paths).uniq.length > 1\n        File.open(\"#{kind}_time_manifest.yml\", 'w') do |io|\n          YAML.dump(\n            Hash[\n              parts.group_by(&:paths).map do |paths, paths_parts|\n                [paths.join, paths_parts.map(&:elapsed_time)]\n              end\n            ],\n            io\n          )\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "lib/tasks/resque.rake",
    "content": "require 'resque/tasks'\nrequire 'resque/scheduler/tasks'\n\nnamespace :resque do\n  task :setup => :environment\n\n  task :setup_schedule => :setup do\n    require 'resque-scheduler'\n\n    Resque.schedule = YAML.load_file('config/resque_schedule.yml')\n  end\n\n  task :scheduler => :setup_schedule\nend\n"
  },
  {
    "path": "public/404.html",
    "content": "<!DOCTYPE html>\n<html>\n<head>\n  <title>The page you were looking for doesn't exist (404)</title>\n  <style type=\"text/css\">\n    body { background-color: #fff; color: #666; text-align: center; font-family: arial, sans-serif; }\n    div.dialog {\n      width: 25em;\n      padding: 0 4em;\n      margin: 4em auto 0 auto;\n      border: 1px solid #ccc;\n      border-right-color: #999;\n      border-bottom-color: #999;\n    }\n    h1 { font-size: 100%; color: #f00; line-height: 1.5em; }\n  </style>\n</head>\n\n<body>\n  <!-- This file lives in public/404.html -->\n  <div class=\"dialog\">\n    <h1>The page you were looking for doesn't exist.</h1>\n    <p>You may have mistyped the address or the page may have moved.</p>\n  </div>\n</body>\n</html>\n"
  },
  {
    "path": "public/422.html",
    "content": "<!DOCTYPE html>\n<html>\n<head>\n  <title>The change you wanted was rejected (422)</title>\n  <style type=\"text/css\">\n    body { background-color: #fff; color: #666; text-align: center; font-family: arial, sans-serif; }\n    div.dialog {\n      width: 25em;\n      padding: 0 4em;\n      margin: 4em auto 0 auto;\n      border: 1px solid #ccc;\n      border-right-color: #999;\n      border-bottom-color: #999;\n    }\n    h1 { font-size: 100%; color: #f00; line-height: 1.5em; }\n  </style>\n</head>\n\n<body>\n  <!-- This file lives in public/422.html -->\n  <div class=\"dialog\">\n    <h1>The change you wanted was rejected.</h1>\n    <p>Maybe you tried to change something you didn't have access to.</p>\n  </div>\n</body>\n</html>\n"
  },
  {
    "path": "public/500.html",
    "content": "<!DOCTYPE html>\n<html>\n<head>\n  <title>We're sorry, but something went wrong (500)</title>\n  <style type=\"text/css\">\n    body { background-color: #fff; color: #666; text-align: center; font-family: arial, sans-serif; }\n    div.dialog {\n      width: 25em;\n      padding: 0 4em;\n      margin: 4em auto 0 auto;\n      border: 1px solid #ccc;\n      border-right-color: #999;\n      border-bottom-color: #999;\n    }\n    h1 { font-size: 100%; color: #f00; line-height: 1.5em; }\n  </style>\n</head>\n\n<body>\n  <!-- This file lives in public/500.html -->\n  <div class=\"dialog\">\n    <h1>We're sorry, but something went wrong.</h1>\n    <p>We've been notified about this issue and we'll take a look at it shortly.</p>\n  </div>\n</body>\n</html>\n"
  },
  {
    "path": "public/robots.txt",
    "content": "# See http://www.robotstxt.org/wc/norobots.html for documentation on how to use the robots.txt file\n#\n# To ban all spiders from the entire site uncomment the next two lines:\n# User-Agent: *\n# Disallow: /\n"
  },
  {
    "path": "script/ci",
    "content": "#!/usr/bin/env bash\n\necho Command: $0 $*\necho\necho \"Environment:\"\nenv\necho\n\n# set -ex: make script verbose and exit on first failed command\nset -ex\n\ngem install bundler -v '>= 1.5.2' --conservative\nbundle check || bundle\nbundle exec rake db:create db:schema:load RAILS_ENV=test\nbundle exec rspec\nbundle exec rubocop\nbundle exec haml-lint app/views/\n"
  },
  {
    "path": "script/kochiku-build.sh.sample",
    "content": "#!/usr/bin/env ruby\n\n# This script can be used to initiate a kochiku build of the current branch.\n# The script does not upload any code so the branch must be pushed to the remote before the script is executed.\n#\n# In order to use:\n#   - copy the script to your local machine\n#   - set KOCHIKU_HOST to the host where Kochiku is running\n\nKOCHIKU_HOST = 'https://kochiku.example.com'\n\nrequire 'net/https'\nrequire 'uri'\nrequire 'shellwords'\n\n# Merge on success requested?\nif ARGV.delete(\"--merge\")\n  merge_on_success = \"1\"\nend\n\n# If a ref is given as an argument, use that. Otherwise use the current commit\nif ARGV[0]\n  commit_ish = Shellwords.escape(ARGV[0])\n  branch = \"\"\nelse\n  commit_ish = \"HEAD\"\n  branch = `git rev-parse --abbrev-ref HEAD`.strip\nend\n\nref = `git rev-parse #{commit_ish}`.strip\n\n# verify that the ref exists on origin\ngit_branch_contains = `git branch -r --contains #{ref}`\n\n# git branch --contains will return a non-zero exit code if it does not recognize\n# the sha. It will return a 0 exit code and no output if the ref only exists locally\nif ($? == 0 && git_branch_contains.empty?) || $? != 0\n  puts \"Failed: please push #{ref}\"\n  exit(1)\nend\n\nrepo_url = `git config --get remote.origin.url`.strip\n\nuri = URI.parse(\"#{KOCHIKU_HOST}/builds\")\n\nparams = {\n  'git_sha' => ref,\n  'git_branch' => branch,\n  'merge_on_success' => merge_on_success,\n  'repo_url' => repo_url,\n}\n\nhttp = Net::HTTP.new(uri.host, uri.port)\nhttp.use_ssl = true\n\nrequest = Net::HTTP::Post.new(uri.request_uri)\nrequest.set_form_data(params)\n\nresponse = http.request(request)\n\nif response.code.to_i >= 400\n  puts \"#{response.code} #{response.message}:\"\n  puts response.body\n  exit(1)\nelse\n  puts response['location']\n  `open -g #{response['location']}`\nend\n"
  },
  {
    "path": "spec/controllers/branches_controller_spec.rb",
    "content": "require 'spec_helper'\nrequire 'rexml/document'\n\ndescribe BranchesController do\n  render_views\n\n  describe \"#index\" do\n    let(:repo) { FactoryBot.create(:repository) }\n    let!(:a) { FactoryBot.create(:branch, name: 'aster', repository: repo, convergence: true) }\n    let!(:b) { FactoryBot.create(:branch, name: 'buckeye', repository: repo, updated_at: 30.minutes.ago) }\n    let!(:c) { FactoryBot.create(:branch, name: 'creosote', repository: repo, updated_at: 15.minutes.ago) }\n\n    it \"shows branches in order\" do\n      get :index, params: { repository_path: repo }\n      expect(assigns(:convergence_branches).map(&:name)).to eq(%w{aster})\n      expect(assigns(:recently_active_branches).map(&:name)).to eq(%w{creosote buckeye})\n    end\n  end\n\n  describe \"#show\" do\n    let(:branch) { FactoryBot.create(:branch) }\n    let!(:build1) { FactoryBot.create(:build, :branch_record => branch, :state => 'succeeded', :test_command => \"script/ci\") }\n    let!(:build2) { FactoryBot.create(:build, :branch_record => branch, :state => 'errored', :test_command => \"script/ci\") }\n\n    it \"should return an rss feed of builds\" do\n      get :show, params: { repository_path: branch.repository, id: branch, format: :rss }\n      doc = REXML::Document.new(response.body)\n\n      items = doc.elements.to_a(\"//channel/item\")\n      expect(items.length).to eq(Build.count)\n      expect(items.first.elements.to_a(\"title\").first.text).to eq(\"Build Number #{build2.id} failed\")\n      expect(items.last.elements.to_a(\"title\").first.text).to eq(\"Build Number #{build1.id} success\")\n    end\n\n    it \"should return a JSON if requested\" do\n      get :show, params: { repository_path: branch.repository, id: branch, format: :json }\n      results = JSON.parse(response.body)\n      expect(results['id']).to eq(branch.id)\n      expect(results['recent_builds'].length).to eq(Build.count)\n      expect(results['recent_builds'][0]['build']['id']).to 
eq(build1.id)\n      expect(results['recent_builds'][1]['build']['id']).to eq(build2.id)\n    end\n\n    context \"when the repository is disabled\" do\n      let(:branch2) { FactoryBot.create(:branch_on_disabled_repo) }\n\n      before do\n        build3 = FactoryBot.create(:build, branch_record: branch2, state: 'failed')\n        build_part = FactoryBot.create(:build_part, build_instance: build3)\n        FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed')\n      end\n\n      it \"should disable build button\" do\n        get :show, params: { repository_path: branch2.repository, id: branch2 }\n        expect(response.body).to match(/disabled=\"disabled\"/)\n      end\n    end\n\n    context \"when the repository is enabled\" do\n      let(:branch2) { FactoryBot.create(:branch) }\n\n      before do\n        build3 = FactoryBot.create(:build, branch_record: branch2, state: 'failed')\n        build_part = FactoryBot.create(:build_part, build_instance: build3)\n        FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed')\n      end\n\n      it \"shouldn't disable build button\" do\n        get :show, params: { repository_path: branch2.repository, id: branch2 }\n        expect(response.body).to_not match(/disabled=\"disabled\"/)\n      end\n    end\n  end\n\n  describe \"#request_new_build\" do\n    let(:branch) { FactoryBot.create(:branch) }\n    subject {\n      post :request_new_build, params: { repository_path: branch.repository.to_param, id: branch.to_param }\n    }\n\n    context \"when there is a new commit on the branch that hasn't been built\" do\n      before do\n        @sha = to_40('1')\n        fake_remote_server = double(:sha_for_branch => @sha)\n        allow(RemoteServer).to receive(:for_url).with(branch.repository.url).and_return(fake_remote_server)\n      end\n\n      it \"should create the new build and redirect there\" do\n        expect(branch.builds.where(ref: @sha).first).to 
be_nil\n\n        subject\n        new_build = branch.builds.where(ref: @sha).first\n        expect(new_build).to be_present\n\n        expect(response).to redirect_to(repository_build_path(branch.repository, new_build))\n      end\n    end\n\n    context \"when kochiku has already built the most recent commit on the branch\" do\n      let(:branch_head_sha) { \"4b41fe773057b2f1e2063eb94814d32699a34541\" }\n\n      before do\n        FactoryBot.create(:build, state: 'errored', branch_record: branch, ref: branch_head_sha)\n\n        fake_remote_server = double(:sha_for_branch => branch_head_sha)\n        allow(RemoteServer).to receive(:for_url).with(branch.repository.url).and_return(fake_remote_server)\n      end\n\n      it \"should not create a new build\" do\n        expect { subject }.to_not change { Build.count }\n\n        expect(flash[:error]).to be_nil\n        expect(flash[:warn]).to be_present\n      end\n\n      it \"should redirect to the existing build\" do\n        subject\n        expect(response).to redirect_to(repository_branch_path(branch.repository, branch))\n      end\n    end\n  end\n\n  describe \"#health\" do\n    let(:branch) { FactoryBot.create(:branch) }\n\n    context \"normal circumstances\" do\n      before do\n        build = FactoryBot.create(:build, branch_record: branch, state: 'succeeded')\n        build_part = FactoryBot.create(:build_part, build_instance: build)\n        FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed')\n        FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'passed')\n      end\n\n      it \"should render\" do\n        get :health, params: { repository_path: branch.repository, id: branch }\n        expect(response).to be_success\n      end\n    end\n\n    context \"no builds are present\" do\n      it \"should not error\" do\n        get :health, params: { repository_path: branch.repository, id: branch }\n        expect(response).to be_success\n      
end\n    end\n\n    context \"only older builds are present\" do\n      before do\n        # a build from 1 year ago\n        build = FactoryBot.create(:build, branch_record: branch, state: 'failed', created_at: 1.year.ago)\n        build_part = FactoryBot.create(:build_part, build_instance: build, created_at: 1.year.ago)\n        FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed', created_at: 1.year.ago)\n      end\n\n      context \"no builds from the last 30 days\" do\n        it \"should not error\" do\n          get :health, params: { repository_path: branch.repository, id: branch }\n          expect(response).to be_success\n        end\n      end\n\n      context \"no builds from the last 7 days\" do\n        before do\n          # a build from 10 days ago\n          build = FactoryBot.create(:build, branch_record: branch, state: 'failed', created_at: 10.days.ago)\n          build_part = FactoryBot.create(:build_part, build_instance: build, created_at: 10.days.ago)\n          FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed', created_at: 10.days.ago)\n        end\n\n        it \"should not error\" do\n          get :health, params: { repository_path: branch.repository, id: branch }\n          expect(response).to be_success\n        end\n      end\n\n    end\n  end\n\n  describe \"#build_time_history\" do\n    # the logic here is tested inside branch_spec and branch_decorator_spec. 
Just\n    # verify that the endpoint responds ok\n\n    let(:branch) { FactoryBot.create(:branch) }\n\n    before do\n      FactoryBot.create(:completed_build, branch_record: branch)\n    end\n\n    it \"should render\" do\n      get :build_time_history, params: { repository_path: branch.repository, id: branch, format: :json }\n      expect(response).to be_success\n    end\n  end\n\n  describe \"#status_report\" do\n    let(:repository) { FactoryBot.create(:repository) }\n    let(:branch) { FactoryBot.create(:master_branch, repository: repository) }\n\n    context \"when a branch has no builds\" do\n      before { expect(branch.builds).to be_empty }\n\n      it \"should return 'Unknown' for activity\" do\n        get :status_report, params: { format: :xml }\n        expect(response).to be_success\n\n        doc = Nokogiri::XML(response.body)\n        element = doc.at_xpath(\"/Projects/Project[@name='#{repository.to_param}']\")\n\n        expect(element['activity']).to eq('Unknown')\n      end\n    end\n\n    context \"with a in-progress build\" do\n      let!(:build) { FactoryBot.create(:build, state: 'running', branch_record: branch) }\n\n      it \"should return 'Building' for activity\" do\n        get :status_report, params: { format: :xml }\n        expect(response).to be_success\n\n        doc = Nokogiri::XML(response.body)\n        element = doc.at_xpath(\"/Projects/Project[@name='#{repository.to_param}']\")\n\n        expect(element['activity']).to eq('Building')\n      end\n    end\n\n    context \"with a completed build\" do\n      let!(:build) { FactoryBot.create(:build, state: 'failed', branch_record: branch) }\n\n      it \"should return 'CheckingModifications' for activity\" do\n        get :status_report, params: { format: :xml }\n        expect(response).to be_success\n\n        doc = Nokogiri::XML(response.body)\n        element = doc.at_xpath(\"/Projects/Project[@name='#{repository.to_param}']\")\n\n        expect(element['activity']).to 
eq('CheckingModifications')\n      end\n    end\n\n    context \"with extra convergence branch and one non-convergence\" do\n      before do\n        FactoryBot.create(:branch, :name => 'feature-branch', convergence: false, repository: repository)\n        FactoryBot.create(:branch, :name => 'convergence', convergence: true, repository: repository)\n      end\n\n      it \"should include all of the convergence branches\" do\n        branch ## Explicitly reference the branch to cause it to load\n\n        get :status_report, params: { format: :xml }\n\n        expect(response).to be_success\n\n        doc = Nokogiri::XML(response.body)\n        elements = doc.xpath(\"/Projects/Project\")\n\n        expect(elements).to have(2).items\n\n        names = elements.map{ |e| e.attribute(\"name\").to_s }\n\n        expect(names).to match_array([repository.to_param, \"#{repository.to_param}/convergence\"])\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/controllers/build_artifacts_controller_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe BuildArtifactsController do\n  describe \"#create\" do\n    let(:build) { FactoryBot.create(:build) }\n    let(:build_part) { build.build_parts.create!(:paths => [\"a\"], :kind => \"test\", :queue => 'ci') }\n    let(:build_attempt) { build_part.build_attempts.create!(:state => 'failed') }\n    let(:log_file) { fixture_file_upload(\"/build_artifact.log\", 'text/xml') }\n\n    it \"should create a build artifact for the build attempt\" do\n      log_contents = log_file.read\n      expect(log_contents).not_to be_empty\n      log_file.rewind\n\n      expect {\n        post :create, params: { :build_attempt_id => build_attempt.to_param, :build_artifact => {:log_file => log_file}, :format => :xml }\n      }.to change{ build_attempt.build_artifacts.count }.by(1)\n\n      artifact = assigns(:build_artifact)\n      expect(artifact.log_file.read).to eq(log_contents)\n    end\n\n    it \"should return the correct location\" do\n      post :create, params: { :build_attempt_id => build_attempt.to_param, :build_artifact => {:log_file => log_file}, :format => :xml }\n      expect(response).to be_success\n      expect(response.location).to eq(assigns(:build_artifact).log_file.url)\n    end\n  end\nend\n"
  },
  {
    "path": "spec/controllers/build_attempts_controller_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe BuildAttemptsController do\n  describe \"#start\" do\n    it \"should set the start time and state of the build attempt\" do\n      build_attempt = FactoryBot.create(:build_attempt)\n      expect(build_attempt.state).to eq('runnable')\n      expect(build_attempt.started_at).to be_nil\n      expect(build_attempt.builder).to be_nil\n\n      post :start, params: { :id => build_attempt.to_param, :builder => \"build01\", :format => :json }\n      expect(response).to be_success\n\n      build_attempt.reload\n      expect(build_attempt.state).to eq('running')\n      expect(build_attempt.started_at).not_to be_nil\n      expect(build_attempt.builder).to eq(\"build01\")\n      expect(build_attempt.log_streamer_port).to be_nil\n    end\n\n    it \"should set log streamer port if provided\" do\n      build_attempt = FactoryBot.create(:build_attempt)\n      expect(build_attempt.state).to eq('runnable')\n      expect(build_attempt.started_at).to be_nil\n      expect(build_attempt.builder).to be_nil\n      expect(build_attempt.log_streamer_port).to be_nil\n\n      post :start, params: { :id => build_attempt.to_param, :builder => \"build01\", :logstreamer_port => 10000, :format => :json }\n      expect(response).to be_success\n\n      build_attempt.reload\n      expect(build_attempt.state).to eq('running')\n      expect(build_attempt.started_at).not_to be_nil\n      expect(build_attempt.builder).to eq(\"build01\")\n      expect(build_attempt.log_streamer_port).to eq(10000)\n    end\n\n    it \"should return aborted if the build_attempt is aborted\" do\n      build_attempt = FactoryBot.create(:build_attempt, :state => 'aborted')\n\n      post :start, params: { :id => build_attempt.to_param, :builder => \"build01\", :format => :json }\n      expect(response).to be_success\n\n      expect(JSON.parse(response.body)[\"build_attempt\"][\"state\"]).to eq(\"aborted\")\n      expect(build_attempt.reload.state).to eq('aborted')\n    end\n  
end\n\n  describe \"#finish\" do\n    it \"should set the finish time and state of the build attempt\" do\n      build_attempt = FactoryBot.create(:build_attempt)\n      expect(build_attempt.state).to eq('runnable')\n      expect(build_attempt.finished_at).to be_nil\n\n      post :finish, params: { :id => build_attempt.to_param, :state => \"passed\", :format => :json }\n      expect(response).to be_success\n\n      build_attempt.reload\n      expect(build_attempt.state).to eq('passed')\n      expect(build_attempt.finished_at).not_to be_nil\n    end\n\n    it \"should return errors when the build_attempt fails to start\" do\n      build_attempt = FactoryBot.create(:build_attempt)\n\n      post :finish, params: { :id => build_attempt.to_param, :state => \"invalid-state\", :format => :json }\n      expect(response.code).to eq(\"422\")\n\n      expect(JSON.parse(response.body)['state']).not_to be_blank\n    end\n\n    it \"should redirect to the build_part's URL for HTML requests\" do\n      build_attempt = FactoryBot.create(:build_attempt)\n\n      post :finish, params: { :id => build_attempt.to_param, :state => \"aborted\", :format => :html }\n\n      expect(response.code).to eq(\"302\")\n      build_attempt.reload\n      expect(build_attempt.state).to eq('aborted')\n    end\n  end\n\n  describe \"#stream_logs\" do\n    it \"should return 404 for build attempt that doesn't have log streaming port\" do\n      build_attempt = FactoryBot.create(:build_attempt, :log_streamer_port => nil)\n\n      get :stream_logs, params: { :id => build_attempt.to_param, :format => :html }\n      expect(response.code). 
to eq(\"404\")\n    end\n  end\n\n  describe \"#stream_logs_chunk\" do\n    it \"should return error for build attempt that doesn't have log streaming port\" do\n      build_attempt = FactoryBot.create(:build_attempt, :log_streamer_port => nil)\n\n      get :stream_logs_chunk, params: { :id => build_attempt.to_param, :format => :json }\n      expect(response.code).to eq(\"500\")\n      expect(JSON.parse(response.body)['error']).to eq(\"No log streaming available for this build attempt\")\n    end\n\n    it \"should return error for build attempt that doesn't have builder\" do\n      build_attempt = FactoryBot.create(:build_attempt, :log_streamer_port => 10000, :builder => nil)\n\n      get :stream_logs_chunk, params: { :id => build_attempt.to_param, :format => :json }\n      expect(response.code).to eq(\"500\")\n      expect(JSON.parse(response.body)['error']).to eq(\"No log streaming available for this build attempt\")\n    end\n\n    context \"logstreamer not successful\" do\n      before do\n        stub_request(:get, \"http://worker.example.com:10000/build_attempts/100/log/stdout.log?maxBytes=250000&start=0\").to_return(:status => 500, :body => \"{}\", :headers => {})\n      end\n      it \"should return error when logstreamer errors\" do\n        build_attempt = FactoryBot.create(:build_attempt, :log_streamer_port => 10000, :builder => 'worker.example.com', :id => 100)\n\n        get :stream_logs_chunk, params: { :id => build_attempt.to_param, :format => :json }\n        expect(response.code).to eq(\"500\")\n        expect(JSON.parse(response.body)['error']).to eq(\"unable to reach log streamer\")\n      end\n    end\n\n    context \"logstreamer successful\" do\n      let(:logstreamer_body) { '{\"Start\" : 0, \"Contents\" : \"This is a test\\n\", \"BytesRead\": 15, \"LogName\": \"stdout.log\"}' }\n\n      before do\n        stub_request(:get, \"http://worker.example.com:10000/build_attempts/100/log/stdout.log?maxBytes=250000&start=0\").to_return(:status => 
200, :body => logstreamer_body, :headers => {})\n      end\n      it \"should proxy request from logstreamer and add build attempt state\" do\n        build_attempt = FactoryBot.create(:build_attempt, :log_streamer_port => 10000, :builder => 'worker.example.com', :id => 100, state: 'running')\n\n        get :stream_logs_chunk, params: { :id => build_attempt.to_param, :format => :json }\n        expect(response.code).to eq(\"200\")\n        response_hash = JSON.parse(response.body)\n        logstreamer_hash = JSON.parse(logstreamer_body)\n        expect(response_hash.merge(logstreamer_hash)).to eq(response_hash) # check that response_hash includes all attributes from logstreamer_hash\n        expect(response_hash['state']).to eq(\"running\")\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/controllers/build_parts_controller_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe BuildPartsController do\n  render_views\n\n  let(:build) { FactoryBot.create(:build) }\n  let(:repository) { build.repository }\n  let(:build_part) { build.build_parts.create!(:paths => [\"a\"], :kind => \"test\", :queue => 'ci') }\n\n  describe \"#show\" do\n    it \"renders the show template successfully even if elapsed time is nil\" do\n      expect(build_part.elapsed_time).to eq(nil)\n      get :show, params: { repository_path: repository, build_id: build, id: build_part }\n      expect(response).to be_success\n      expect(response).to render_template(\"build_parts/show\")\n    end\n\n    it \"renders JSON if requested\" do\n      build_attempt = FactoryBot.create(:build_attempt, build_part: build_part)\n      FactoryBot.create(:build_artifact, build_attempt: build_attempt)\n      get :show, params: { repository_path: repository, build_id: build, id: build_part, format: :json }\n      ret = JSON.parse(response.body)\n      expect(ret['build_part']['build_attempts'].length).to eq(1)\n      expect(ret['build_part']['build_attempts'][0]['files'].length).to eq(1)\n      expect(ret['build_part']['build_attempts'][0]['files'][0]['build_artifact']['build_attempt_id']).to eq(build_attempt.id)\n    end\n\n    context \"when the repository is disabled\" do\n      let(:build2) { FactoryBot.create(:build_on_disabled_repo) }\n      let(:build_part2) { FactoryBot.create(:build_part, build_instance: build2) }\n\n      it \"should not show Rebuild button\" do\n        get :show, params: { repository_path: build2.repository, build_id: build2, id: build_part2 }\n        expect(response.body).to_not match(/class=\"rebuild button\"/)\n      end\n    end\n\n    context \"when the repository is enabled\" do\n      let(:build2) { FactoryBot.create(:build) }\n      let(:build_part2) { FactoryBot.create(:build_part, build_instance: build2) }\n\n      it \"should show Rebuild button\" do\n        get :show, params: { repository_path: 
build2.repository, build_id: build2, id: build_part2 }\n        expect(response.body).to match(/class=\"rebuild button\"/)\n      end\n    end\n  end\n\n  describe \"#rebuild\" do\n    subject {\n      get :rebuild, params: { repository_path: repository, build_id: build, id: build_part }\n    }\n\n    it \"should redirect to the right place\" do\n      allow_any_instance_of(Build).to receive(:test_command).and_return(\"echo just chill\")\n\n      subject\n      expect(response).to redirect_to(repository_build_path(build.repository, build))\n    end\n\n    context \"the requested commit SHA no longer exists\" do\n      before do\n        allow_any_instance_of(Build).to receive(:test_command).and_raise(GitRepo::RefNotFoundError)\n      end\n\n      it \"should not create a new build attempt\" do\n        build_part  # trigger creation of the db records\n\n        expect {\n          subject\n        }.to_not change { build_part.build_attempts.count }\n      end\n\n      it \"should display a flash error\" do\n        subject\n        expect(flash[:error]).to be_present\n      end\n    end\n  end\n\n  describe '#refresh_build_part_info' do\n    it \"returns partials for build_attempts\" do\n      build_attempt = FactoryBot.create(:build_attempt, build_part: build_part)\n      FactoryBot.create(:build_artifact, build_attempt: build_attempt)\n      get :refresh_build_part_info, params: { repository_path: repository, build_id: build, id: build_part, format: :json }\n      res = JSON.parse(response.body)\n      expect(res.first['state']).to eq(build_attempt.state)\n      expect(res.first['content']).to include(\"/build_attempts/#{build_attempt.id}/\")\n    end\n  end\nend\n"
  },
  {
    "path": "spec/controllers/builds_controller_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe BuildsController do\n  describe \"#create\" do\n    let(:action) { :create }\n    let(:repo) { FactoryBot.create(:repository) }\n    let!(:branch) { FactoryBot.create(:branch, repository: repo, name: 'gummy-bears') }\n    let(:git_sha) { '30b111147d9a245468c6650f54de5c16584bc154' }\n    let(:params) {\n      {\n        repo_url: repo.url,\n        git_branch: branch.name,\n        git_sha: git_sha,\n      }\n    }\n\n    it \"should return a 404 if the repo does not exist\" do\n      repo.destroy\n      post action, params: params\n      expect(response.code).to eq(\"404\")\n    end\n\n    it \"should create a branch record if it does not exist\" do\n      branch_name = branch.name\n      branch.destroy\n      expect {\n        post action, params: params\n      }.to change { Branch.exists?(name: branch_name) }.from(false).to(true)\n    end\n\n    it \"should create a new build\" do\n      expect {\n        post action, params: params\n      }.to change { Build.exists?(ref: git_sha) }.from(false).to(true)\n    end\n\n    it \"sets merge_on_success when param given\" do\n      post action, params: params.merge(merge_on_success: \"1\")\n      new_build = Build.where(ref: git_sha).first\n      expect(new_build.merge_on_success).to be(true)\n    end\n\n    it \"defaults merge_on_success to false when param not given\" do\n      expect(params.key?(:merge_on_success)).to be(false)\n      post action, params: params\n      new_build = Build.where(ref: git_sha).first\n      expect(new_build.merge_on_success).to be(false)\n    end\n\n    it \"should return the build info page in the location header\" do\n      post action, params: params\n\n      new_build = Build.where(ref: git_sha).first\n      expect(new_build).to be_present\n\n      expect(response.location).to eq(repository_build_url(repo, new_build))\n    end\n\n    context \"a specific git_sha is not specified\" do\n      let(:params) {\n        {\n          repo_url: 
repo.url,\n          git_branch: branch.name,\n        }\n      }\n\n      before do\n        mocked_remote_server = RemoteServer.for_url(repo.url)\n        allow(mocked_remote_server).to receive(:sha_for_branch).with(branch.name).and_return(to_40(\"2\"))\n        allow(RemoteServer).to receive(:for_url).with(repo.url).and_return(mocked_remote_server)\n      end\n\n      it \"should create a build for the HEAD commit on the given branch\" do\n        expect {\n          post action, params: params\n        }.to change { Build.exists?(ref: to_40(\"2\"), branch_record: branch) }.from(false).to(true)\n      end\n    end\n\n    context \"when the pushed sha has already been built\" do\n      it \"has no effect\" do\n        branch = FactoryBot.create(:branch, repository: repo, name: 'other-branch')\n        build = FactoryBot.create(:build, branch_record: branch, ref: git_sha)\n        expect {\n          post action, params: params\n          expect(response).to be_success\n        }.to_not change(Build, :count)\n        expect(response.headers[\"Location\"]).to eq(repository_build_url(repo, build))\n      end\n    end\n\n    context \"when the sha is already associated with another branch under this repo\" do\n      it \"should return a URL to the existing build\" do\n        other_branch = FactoryBot.create(:branch, repository: repo)\n        other_build = FactoryBot.create(:build, branch_record: other_branch, ref: git_sha)\n        branch  # ensure the 'let' gets invoked\n\n        post action, params: params\n        expect(response).to be_success\n        expect(response.headers[\"Location\"]).to eq(repository_build_url(repo, other_build))\n      end\n    end\n\n    context \"when the sha is already used by a different repo\" do\n      it \"should create a new build\" do\n        other_repo = FactoryBot.create(:repository)\n        other_branch = FactoryBot.create(:branch, repository: other_repo)\n        other_build = FactoryBot.create(:build, branch_record: 
other_branch, ref: git_sha)\n\n        expect {\n          post action, params: params\n          expect(response).to be_success\n        }.to change(Build, :count).by(1)\n        expect(response.headers[\"Location\"]).to_not eq(repository_build_url(other_repo, other_build))\n      end\n    end\n\n    it \"should allow the repository url to be in an alternate format\" do\n      expect(repo).to_not be_new_record\n\n      post action, params: params.merge(repo_url: \"https://github.com/#{repo.namespace}/#{repo.name}.git\")\n      expect(response).to be_success\n    end\n  end\n\n  describe \"#show\" do\n    it \"should return a valid JSON\" do\n      branch = FactoryBot.create(:branch, name: 'gummy-bears')\n      build = FactoryBot.create(:build, branch_record: branch, :test_command => \"script/ci\")\n      build_part = FactoryBot.create(:build_part, build_instance: build)\n      FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed')\n      get :show, params: { repository_path: branch.repository, id: build.id, format: :json }\n      ret = JSON.parse(response.body)\n      expect(ret['build']['build_parts'].length).to eq(1)\n      expect(ret['build']['build_parts'][0]['build_id']).to eq(build.id)\n      expect(ret['build']['build_parts'][0]['status']).to eq('passed')\n    end\n\n    context \"when the repository is disabled\" do\n      render_views\n      let(:build) {\n        FactoryBot.create(:build_on_disabled_repo, state: 'failed')\n      }\n\n      it \"should not show 'Rebuild failed parts' button or rebuild action in #build-summary table\" do\n        build_part = FactoryBot.create(:build_part, build_instance: build)\n        FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed')\n        get :show, params: { repository_path: build.repository, id: build.id }\n        expect(response.body).to_not match(/<input.*value=\"Rebuild failed parts\"/)\n        expect(response.body).to_not 
match(%r{<a.*>Rebuild<\\/a>$})\n      end\n\n      it \"should not show 'Retry Partitioning' button\" do\n        build.build_parts.delete_all\n        get :show, params: { repository_path: build.repository, id: build.id }\n        expect(response.body).to_not match(/<input.*value=\"Retry Partitioning\"/)\n      end\n    end\n\n    context \"when the repository is enabled\" do\n      render_views\n      let(:build) {\n        FactoryBot.create(:build, state: 'failed')\n      }\n\n      it \"should show 'Rebuild failed parts' button or rebuild action in #build-summary table\" do\n        build_part = FactoryBot.create(:build_part, build_instance: build)\n        FactoryBot.create(:completed_build_attempt, build_part: build_part, state: 'failed')\n        get :show, params: { repository_path: build.repository, id: build.id }\n        expect(response.body).to match(/<input.*value=\"Rebuild failed parts\"/)\n        expect(response.body).to match(%r{<a.*>Rebuild<\\/a>$})\n      end\n\n      it \"should show 'Retry Partitioning' button\" do\n        build.build_parts.delete_all\n        get :show, params: { repository_path: build.repository.to_param, id: build.id }\n        expect(response.body).to match(/<input.*value=\"Retry Partitioning\"/)\n      end\n    end\n  end\n\n  describe \"#abort\" do\n    before do\n      @build = FactoryBot.create(:build)\n      put :abort, params: { repository_path: @build.repository.to_param, id: @build.to_param }\n    end\n\n    it \"redirects back to the build page\" do\n      expect(response).to redirect_to(repository_build_path(@build.repository, @build))\n    end\n\n    # spot-check that it does some abort action\n    it \"sets the build's state to aborted\" do\n      expect(@build.reload.state).to eq('aborted')\n    end\n  end\n\n  describe \"#on_success_log_file link\" do\n    render_views\n    let(:build) { FactoryBot.create(:build, state: 'succeeded') }\n    before do\n      @action = :show\n      @params = {:id => build.id, 
:repository_path => build.repository}\n    end\n\n    context \"has on_success_log_file\" do\n      before do\n        output = \"Exited with status: 0\"\n        script_log = FilelessIO.new(output)\n        script_log.original_filename = \"on_success_script.log\"\n        build.on_success_script_log_file = script_log\n        build.save\n      end\n\n      it \"displays link to on_success_log_file\" do\n        get @action, params: @params\n        doc = Nokogiri::HTML(response.body)\n        elements = doc.search(\"[text()*='on_success_script.log']\")\n        expect(elements.size).to eq(1)\n      end\n    end\n\n    context \"does not have on_success_log_file\" do\n      it \"does not display link to on_success_log_file\" do\n        get @action, params: @params\n        doc = Nokogiri::HTML(response.body)\n        elements = doc.search(\"[text()*='on_success_script.log']\")\n        expect(elements.size).to eq(0)\n      end\n    end\n  end\n\n  describe \"#toggle_merge_on_success\" do\n    before do\n      @build = FactoryBot.create(:build, :merge_on_success => true)\n    end\n\n    it \"aborts merge_on_success\" do\n      post :toggle_merge_on_success, params: { id: @build.to_param, repository_path: @build.repository.to_param, merge_on_success: false }\n      expect(response).to redirect_to(repository_build_path(@build.repository, @build))\n      expect(@build.reload.merge_on_success).to be false\n    end\n\n    it \"enables merge_on_success\" do\n      @build.update_attributes(:merge_on_success => false)\n      post :toggle_merge_on_success, params: { id: @build.to_param, repository_path: @build.repository.to_param, merge_on_success: true }\n      expect(response).to redirect_to(repository_build_path(@build.repository, @build))\n      expect(@build.reload.merge_on_success).to be true\n    end\n  end\n\n  describe \"merge_on_success checkbox\" do\n    render_views\n    let(:build) { FactoryBot.create(:build) }\n\n    before do\n      @action = :show\n      
@params = {:id => build.id, :repository_path => build.repository}\n    end\n\n    it \"renders the merge_on_success checkbox\" do\n      get @action, params: @params\n      doc = Nokogiri::HTML(response.body)\n      elements = doc.css(\"input[name=merge_on_success]\")\n      expect(elements.size).to eq(1)\n      expect(elements.first['checked']).to be_blank\n    end\n\n    context \"for builds with merge_on_success enabled\" do\n      let(:build) { FactoryBot.create(:build, merge_on_success: true) }\n      it \"renders the merge_on_success checkbox\" do\n        get @action, params: @params\n        doc = Nokogiri::HTML(response.body)\n        elements = doc.css(\"input[name=merge_on_success]\")\n        expect(elements.size).to eq(1)\n        expect(elements.first['checked']).to be_present\n      end\n    end\n\n    context \"for builds on a convergence branch\" do\n      let(:build) { FactoryBot.create(:convergence_branch_build) }\n\n      it \"renders the merge_on_success checkbox disabled\" do\n        get @action, params: @params\n        doc = Nokogiri::HTML(response.body)\n        elements = doc.css(\"input[name=merge_on_success]\")\n        expect(elements.size).to eq(1)\n        expect(elements.first['disabled']).to be_present\n      end\n    end\n  end\n\n  describe \"#rebuild_failed_parts\" do\n    let(:build) { FactoryBot.create(:build) }\n    let(:parts) { (1..4).map { FactoryBot.create(:build_part, :build_instance => build) } }\n\n    before do\n      allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil)\n    end\n\n    subject { post :rebuild_failed_parts, params: { repository_path: build.repository.to_param, id: build.id } }\n\n    context \"happy path\" do\n      before do\n        @attempt_1 = FactoryBot.create(:build_attempt, :build_part => parts[0], :state => 'failed')\n        @attempt_2 = FactoryBot.create(:build_attempt, :build_part => parts[1], :state => 'failed')\n        @attempt_3 = FactoryBot.create(:build_attempt, :build_part => 
parts[1], :state => 'errored')\n        @attempt_4 = FactoryBot.create(:build_attempt, :build_part => parts[2], :state => 'passed')\n        @attempt_5 = FactoryBot.create(:build_attempt, :build_part => parts[3], :state => 'aborted')\n      end\n\n      it \"rebuilds all failed attempts\" do\n        expect(build.build_parts.failed_errored_or_aborted.count).to eq(3)\n        subject\n        expect(build.reload.build_parts.failed.count).to be_zero\n        expect(build.build_attempts.count).to eq(5 + 3)\n      end\n\n      it \"only enqueues one build attempt for each failed build part\" do\n        subject\n        expect(parts[0].reload.build_attempts.count).to eq(2)\n        expect(parts[1].reload.build_attempts.count).to eq(3)\n        expect(parts[3].reload.build_attempts.count).to eq(2)\n\n        expect {\n          # repost to test idempotency\n          post :rebuild_failed_parts, params: { repository_path: build.repository.to_param, id: build.id }\n        }.to_not change(BuildAttempt, :count)\n      end\n    end\n\n    context \"a successful prior build attempt should not be rebuilt\" do\n      it \"does something\" do\n        FactoryBot.create(:build_attempt, :build_part => parts[1], :state => 'passed') # attempt 1\n        FactoryBot.create(:build_attempt, :build_part => parts[1], :state => 'failed') # attempt 2\n\n        expect { subject }.to_not change(BuildAttempt, :count)\n      end\n    end\n  end\n\n  describe \"#retry_partitioning\" do\n    let!(:build) { FactoryBot.create(:build) }\n    before do\n      allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil)\n    end\n\n    context \"when there are no build parts\" do\n      it \"enqueues a partitioning job\" do\n        expect(Resque).to receive(:enqueue)\n        post :retry_partitioning, params: { repository_path: build.repository.to_param, id: build.id }\n        expect(response).to redirect_to(repository_build_path(build.repository, build))\n      end\n    end\n\n    context \"when 
there are already build parts\" do\n      it \"does nothing\" do\n        expect(Resque).to_not receive(:enqueue)\n        FactoryBot.create(:build_part, build_instance: build)\n        post :retry_partitioning, params: { repository_path: build.repository.to_param, id: build.id }\n        expect(response).to redirect_to(repository_build_path(build.repository, build))\n      end\n    end\n\n    context \"when the build's repository is disabled\" do\n      it \"should not partition build\" do\n        build2 = FactoryBot.create(:build_on_disabled_repo)\n        expect(Resque).to_not receive(:enqueue)\n        post :retry_partitioning, params: { repository_path: build2.repository.to_param, id: build2.id }\n        expect(response).to redirect_to(repository_build_path(build2.repository, build2))\n      end\n    end\n  end\n\n  describe \"#build_redirect\" do\n    it \"should redirect to the full build show url\" do\n      build = FactoryBot.create(:build)\n      get :build_redirect, params: { id: build.id }\n      expect(response).to redirect_to(repository_build_path(build.repository, build))\n    end\n  end\n\n  describe \"#build_ref_redirect\" do\n    it \"should redirect to the build show url that matches the ref given\" do\n      build = FactoryBot.create(:build)\n      get :build_ref_redirect, params: { ref: build.ref[0, 8] }\n      expect(response).to redirect_to(repository_build_path(build.repository, build))\n    end\n  end\nend\n"
  },
  {
    "path": "spec/controllers/dashboards_controller_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe DashboardsController do\n\n  describe \"#build_history_by_worker\" do\n    it \"should render worker health page\" do\n      get :build_history_by_worker\n      expect(response).to be_success\n    end\n  end\n\nend\n"
  },
  {
    "path": "spec/controllers/pull_requests_controller_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe PullRequestsController do\n  describe \"#build\" do\n\n    shared_examples \"common behavior\" do |git_server_type|\n      before do\n        class_eval do\n          alias_method :push_payload, \"#{git_server_type}_push_payload\".to_sym\n          alias_method :pull_request_payload, \"#{git_server_type}_pull_request_payload\".to_sym\n        end\n      end\n\n      let!(:repository) do\n        FactoryBot.create(:repository, { url: \"git@#{git_server_type}.com:square/web.git\" }.merge(repository_fields))\n      end\n      let(:repository_fields) { {} }  # expected to be overwritten by a sub-context\n\n      context \"when push requests come\" do\n        let(:repository_fields) { { run_ci: true } }\n        let!(:master_branch) { FactoryBot.create(:master_branch, repository: repository) }\n        let!(:convergence_branch) { FactoryBot.create(:branch, repository: repository, convergence: true, name: \"convergence_branch\") }\n\n        it \"creates a build\" do\n          expect(Build.where(branch_id: master_branch.id, ref: to_40('2'))).to_not exist\n          expect {\n            post :build, params: push_payload\n            expect(response).to be_success\n          }.to change(Build, :count).by(1)\n          expect(Build.where(branch_id: master_branch.id, ref: to_40('2'))).to exist\n        end\n\n        it \"does not create a build for pushes to non-convergence branches\" do\n          expect {\n            post :build, params: push_payload(\"ref\" => \"refs/heads/some-branch\")\n            expect(response).to be_success\n          }.to_not change(Build, :count)\n        end\n\n        it \"does not create a build if repository has ci disabled\" do\n          repository.update_attributes!(:run_ci => false)\n          expect {\n            post :build, params: push_payload\n            expect(response).to be_success\n          }.to_not change(Build, :count)\n        end\n\n        it \"does not build if there 
is already a ci build in progress\" do\n          master_branch.builds.create!(:ref => to_40('w'), :state => 'succeeded')\n          frozen_time = 3.seconds.from_now\n          allow(Time).to receive(:now).and_return(frozen_time)\n          master_branch.builds.create!(:ref => to_40('y'), :state => 'partitioning')\n          expect {\n            post :build, params: push_payload\n            expect(response).to be_success\n          }.to_not change(Build, :count)\n        end\n\n        it \"builds for convergence branches\" do\n          expect {\n            post :build, params: push_payload(\"ref\" => \"refs/heads/convergence_branch\")\n            expect(response).to be_success\n          }.to change(Build, :count).by(1)\n        end\n\n        it \"builds if there is a completed ci build\" do\n          master_branch.builds.create!(:ref => to_40('w'), :state => 'succeeded')\n          expect {\n            post :build, params: push_payload\n            expect(response).to be_success\n          }.to change(Build, :count).by(1)\n        end\n\n        it \"builds if there is a completed ci build after a build that is still building\" do\n          master_branch.builds.create!(:ref => to_40('w'), :state => 'partitioning')\n          frozen_time = 3.seconds.from_now\n          allow(Time).to receive(:now).and_return(frozen_time)\n          master_branch.builds.create!(:ref => to_40('y'), :state => 'succeeded')\n          expect {\n            post :build, params: push_payload\n            expect(response).to be_success\n          }.to change(Build, :count).by(1)\n        end\n\n        it \"should not error if the repository url in the request is not found\" do\n          key = git_server_type == \"github\" ? 
\"ssh_url\" : \"url\"\n          repository_overrides = {\n            key => \"git@git.#{git_server_type}.com:doesnot/exist.git\",\n            \"host\" => nil,\n            \"key\" => nil,\n            \"slug\" => nil,\n            \"name\" => nil,\n            \"full_name\" => nil\n          }\n          expect {\n            post :build, params: push_payload(\"repository\" => repository_overrides)\n            expect(response).to be_success\n          }.to_not change(Build, :count)\n        end\n      end\n\n      context \"for pull requests\" do\n        let(:repository_fields) { { run_ci: true, build_pull_requests: true } }\n\n        context \"when there is no existing Branch record\" do\n          it \"should create a Branch record on demand\" do\n            expect(Branch.where(repository: repository, name: \"branch-name\")).to_not exist\n            post :build, params: pull_request_payload\n            expect(response).to be_success\n            expect(Branch.where(repository: repository, name: \"branch-name\")).to exist\n          end\n        end\n\n        context \"when the pull request sha has already been built\" do\n          before do\n            @github_payload = pull_request_payload(\n              \"pull_request\" => {\n                \"head\" => {\"sha\" => \"de8251ff97ee194a289832576287d6f8ad74e3d0\", \"ref\" => \"branch-name\"},\n                \"body\" => \"best pull request ever\",\n              })\n          end\n          let!(:branch) { FactoryBot.create(:branch, repository: repository, name: \"branch-name\") }\n\n          it \"has no effect\" do\n            FactoryBot.create(:build, branch_record: branch, branch_id: branch.id, ref: \"de8251ff97ee194a289832576287d6f8ad74e3d0\")\n\n            expect {\n              post :build, params: @github_payload\n              expect(response).to be_success\n            }.to_not change(Build, :count)\n          end\n\n          it \"still creates a build if the sha is used by a different 
repo\" do\n            common_sha = \"de8251ff97ee194a289832576287d6f8ad74e3d0\"\n\n            # create build with the same ref on a different repository\n            repo2 = FactoryBot.create(:repository, url: \"git@git.#{git_server_type}.com:square/other-repo.git\")\n            repo2_branch = FactoryBot.create(:branch, repository: repo2)\n            FactoryBot.create(:build, branch_record: repo2_branch, ref: common_sha)\n\n            expect(Build.where(branch_id: branch.id, ref: common_sha)).to_not exist\n            post :build, params: @github_payload\n            expect(Build.where(branch_id: branch.id, ref: common_sha)).to exist\n          end\n        end\n\n        context do\n          let(:branch) { FactoryBot.create(:branch, repository: repository, name: \"branch-name\") }\n\n          it \"creates a build for a pull request\" do\n            expect(Build.where(branch_id: branch.id, ref: to_40('1'))).to_not exist\n            expect {\n              post :build, params: pull_request_payload\n              expect(response).to be_success\n            }.to change(Build, :count).by(1)\n            expect(Build.where(branch_id: branch.id, ref: to_40('1'))).to exist\n          end\n\n          it \"does not create a build if build_pull_requests is disabled\" do\n            repository.update_attribute(:build_pull_requests, false)\n            expect {\n              post :build, params: pull_request_payload({\"pull_request\" => {\"body\" => \"don't build it\"}})\n              expect(response).to be_success\n            }.to_not change(branch.builds, :count)\n          end\n\n          it \"does not build a closed pull request\" do\n            closed_action = git_server_type == \"github\" ? 
{\"pull_request\" => {\"state\" => \"closed\"}} : {\"action\" => \"closed\"}\n            expect {\n              post :build, params: pull_request_payload(closed_action)\n              expect(response).to be_success\n            }.to_not change(branch.builds, :count)\n          end\n\n          it \"does not blow up if action is missing\" do\n            post :build, params: pull_request_payload({\"action\" => nil})\n            expect(response).to be_success\n          end\n\n          context \"when there are other builds for the same branch\" do\n            let!(:build_one) { FactoryBot.create(:build, :branch_record => branch, :ref => to_40('w')) }\n            let!(:build_two) { FactoryBot.create(:build, :branch_record => branch, :ref => to_40('y')) }\n\n            it \"aborts previous builds of the same branch\" do\n              github_payload = pull_request_payload(\n                \"pull_request\" => {\n                  \"head\" => {\"sha\" => to_40('z'), \"ref\" => branch.name},\n                  \"body\" => \"best pull request ever\",\n                })\n\n              expect {\n                post :build, params: github_payload\n                expect(response).to be_success\n              }.to change(Build, :count)\n              expect(build_one.reload).to be_aborted\n              expect(build_two.reload).to be_aborted\n            end\n          end\n        end\n      end\n\n      it \"does not blow up if pull_request data is missing\" do\n        expect {\n          post :build, params: pull_request_payload({\"pull_request\" => nil})\n          expect(response).to be_success\n        }.to_not change(Build, :count)\n      end\n\n      it \"should not error if the repository url in the request is not found\" do\n        expect {\n          pr_payload = pull_request_payload(\"repository\" => { \"ssh_url\" => \"git@git.#{git_server_type}.com:doesnot/exist.git\" })\n          post :build, params: pr_payload\n          expect(response).to 
be_success\n        }.to_not change(Build, :count)\n      end\n    end\n\n    context \"from github\" do\n      before do\n        settings = SettingsAccessor.new(<<-YAML)\n        git_servers:\n          github.com:\n            type: github\n        YAML\n        stub_const \"Settings\", settings\n      end\n\n      include_examples \"common behavior\", \"github\"\n    end\n\n    context \"from stash\" do\n      before do\n        settings = SettingsAccessor.new(<<-YAML)\n        git_servers:\n          stash.com:\n            type: stash\n        YAML\n        stub_const \"Settings\", settings\n      end\n\n      include_examples \"common behavior\", \"stash\"\n    end\n  end\n\n  def github_push_payload(options = {})\n    {\n      \"ref\" => \"refs/heads/master\",\n      \"repository\" => {\n        \"name\" => \"web\",\n        \"full_name\" => \"square/web\",\n        \"ssh_url\" => \"git@github.com:square/web.git\",\n      },\n      \"head_commit\" => {\n        \"id\" => to_40('2')\n      }\n    }.deep_merge(options)\n  end\n\n  def github_pull_request_payload(options = {})\n    {\n      \"action\" => \"opened\",\n      \"pull_request\" => {\n        \"state\" => \"open\",\n        \"head\" => {\n          \"ref\" => \"branch-name\",\n          \"sha\" => to_40('1')\n        }\n      },\n      \"repository\" => {\n        \"name\" => \"web\",\n        \"full_name\" => \"square/web\",\n        \"owner\" => {\n          \"login\" => \"square\"\n        },\n        \"ssh_url\" => \"git@github.com:square/web.git\"\n      }\n    }.deep_merge(options)\n  end\n\n  def stash_push_payload(options = {})\n    {\n      \"payload\" => {\n        \"after\" => to_40('2'),\n        \"repository\" => {\n          \"id\" => \"252\",\n          \"url\" => \"https://stash.com/scm/square/web.git\",\n          \"key\" => \"square\",\n          \"slug\" => \"web\",\n          \"name\" => \"web\",\n        },\n        \"host\" => \"stash.com\",\n        \"ref\" => 
\"refs/heads/master\",\n      }.deep_merge(options).to_json\n    }\n  end\n\n  def stash_pull_request_payload(options = {})\n    {\n      \"payload\" => {\n        \"pull_request\" => {\n          \"head\" => {\n            \"sha\" => to_40('1'),\n            \"ref\" => \"refs/heads/branch-name\",\n            \"to_ref\" => \"refs/heads/master\"\n          },\n          \"body\" => \"best pull request ever\",\n          \"title\" => \"this is a pull request\",\n        },\n        \"repository\" => {\n          \"id\" => \"252\",\n          \"url\" => \"https://stash.com/scm/square/web.git\",\n          \"key\" => \"square\",\n          \"slug\" => \"web\",\n          \"name\" => \"web\",\n        },\n        \"host\" => \"stash.com\",\n        \"action\" => \"synchronize\",\n        \"type\" => \"pr\"\n      }.deep_merge(options).to_json\n    }\n  end\nend\n"
  },
  {
    "path": "spec/controllers/repositories_controller_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe RepositoriesController do\n  render_views\n  describe \"create action\" do\n    before do\n      @params = {\n        repository: { url: \"git@git.example.com:square/kochiku.git\", test_command: \"script/something\" },\n        convergence_branches: \"\",\n      }\n    end\n    it \"should perform a basic create\" do\n      expect{\n        post :create, params: @params\n        expect(response).to be_redirect\n      }.to change(Repository, :count).by(1)\n      repository = Repository.where(url: \"git@git.example.com:square/kochiku.git\").first\n      expect(repository).to be_present\n      expect(repository.name).to eq('kochiku')\n    end\n\n    it \"sets host, namespace, and name based on the repo url\" do\n      post :create, params: @params\n      repository = Repository.where(url: \"git@git.example.com:square/kochiku.git\").first\n      expect(repository.host).to eq('git.example.com')\n      expect(repository.namespace).to eq('square')\n      expect(repository.name).to eq('kochiku')\n    end\n\n    it \"creates a branch_record for the convergence branches\" do\n      post :create, params: @params.merge(convergence_branches: \"master, release-1-x\")\n      expect(response).to be_redirect\n      expect(Branch.exists?(name: 'master', convergence: true)).to be(true)\n      expect(Branch.exists?(name: 'release-1-x', convergence: true)).to be(true)\n    end\n\n    context \"with validation errors\" do\n      it \"re-renders form with errors\" do\n        # timeout outside of the allowable range\n        @params[:repository][:timeout] = '1000000'\n\n        post :create, params: @params\n        expect(response).to be_success\n        expect(assigns[:repository].errors.full_messages.join(','))\n          .to include(\"The maximum timeout allowed is 1440 minutes\")\n        expect(response).to render_template('new')\n      end\n    end\n  end\n\n  describe \"update\" do\n    let!(:repository) { 
FactoryBot.create(:repository, :url => \"git@git.example.com:square/kochiku.git\") }\n\n    it \"updates existing repository\" do\n      expect{\n        patch :update, params: { :id => repository.id, :repository => {:url => \"git@git.example.com:square/kochiku-worker.git\"} }\n        expect(response).to be_redirect\n      }.to_not change(Repository, :count)\n      repository.reload\n      expect(repository.url).to eq(\"git@git.example.com:square/kochiku-worker.git\")\n      expect(response).to be_redirect\n    end\n\n    context \"with invalid data\" do\n      let(:params) { { timeout: 'abc' } }\n\n      it \"re-renders the edit page\" do\n        patch :update, params: { id: repository.id, repository: params }\n        expect(response).to be_success\n        expect(response).to render_template('edit')\n      end\n    end\n\n    # boolean attributes\n    [\n      :enabled,\n      :run_ci,\n      :build_pull_requests,\n      :send_build_failure_email,\n      :send_build_success_email,\n      :send_merge_successful_email,\n      :allows_kochiku_merges\n    ].each do |attribute|\n      it \"should successfully update the #{attribute} attribute\" do\n        start_value = repository.send(attribute)\n        inverse_value_as_str = start_value ? 
\"0\" : \"1\"\n        patch :update, params: { id: repository.id, repository: { attribute => inverse_value_as_str } }\n        repository.reload\n        expect(repository.send(attribute)).to eq(!start_value)\n      end\n    end\n\n    # integer attributes\n    [:timeout].each do |attribute|\n      it \"should successfully update the #{attribute} attribute\" do\n        new_value = rand(1440) # max imposed by repository validation\n        patch :update, params: { id: repository.id, repository: { attribute => new_value } }\n        repository.reload\n        expect(repository.send(attribute)).to eq(new_value)\n      end\n    end\n\n    # string attributes\n    [\n      :on_green_update,\n    ].each do |attribute|\n      it \"should successfully update the #{attribute} attribute\" do\n        new_value = \"Keytar Intelligentsia artisan typewriter 3 wolf moon\"\n        patch :update, params: { id: repository.id, repository: { attribute => new_value } }\n        repository.reload\n        expect(repository.send(attribute)).to eq(new_value)\n      end\n    end\n\n    describe \"of convergence branches\" do\n      it \"should set convergence on new branches in the list\" do\n        # branchA already has convergence\n        branchA = FactoryBot.create(:branch, repository: repository, name: 'branchA', convergence: true)\n        # branchB does not have convergence\n        branchB = FactoryBot.create(:branch, repository: repository, name: 'branchB', convergence: false)\n        # branchC does not have convergence\n        branchC = FactoryBot.create(:branch, repository: repository, name: 'branchC', convergence: false)\n        # branchD does not yet exist\n\n        patch :update, params: { id: repository.id, repository: {timeout: 10}, convergence_branches: \"branchA,branchB,branchD\" }\n        expect(branchA.reload).to be_convergence\n        expect(branchB.reload).to be_convergence\n        expect(branchC.reload).to_not be_convergence\n        branchD = 
repository.branches.where(name: 'branchD').first!\n        expect(branchD).to be_convergence\n      end\n\n      it \"should remove convergence from branches no longer in the list\" do\n        # branchA has convergence\n        branchA = FactoryBot.create(:branch, repository: repository, name: 'branchA', convergence: true)\n        # branchB has convergence\n        branchB = FactoryBot.create(:branch, repository: repository, name: 'branchB', convergence: true)\n        # branchC does not have convergence\n        branchC = FactoryBot.create(:branch, repository: repository, name: 'branchC', convergence: false)\n\n        patch :update, params: { id: repository.id, repository: {timeout: 10}, convergence_branches: \"branchA\" }\n        expect(branchA.reload).to be_convergence\n        expect(branchB.reload).to_not be_convergence\n        expect(branchC.reload).to_not be_convergence\n      end\n    end\n  end\n\n  describe \"delete /repositories/:id\" do\n    let!(:repository) { FactoryBot.create(:repository, :url => \"git@git.example.com:square/kochiku.git\", :test_command => \"script/something\") }\n    it \"responds with success\" do\n      expect {\n        get :destroy, params: { :id => repository.id }\n        expect(response).to be_redirect\n      }.to change(Repository, :count).by(-1)\n    end\n  end\n\n  describe \"get /repositories\" do\n    it \"responds with success\" do\n      get :index\n      expect(response).to be_success\n    end\n  end\n\n  describe \"get /:namespace/:name/edit\" do\n    it \"responds with success\" do\n      get :edit, params: { repository_path: FactoryBot.create(:repository).to_param }\n      expect(response).to be_success\n    end\n  end\n\n  describe \"get /repositories/new\" do\n    it \"responds with success\" do\n      get :new\n      expect(response).to be_success\n    end\n  end\n\n  describe \"get /dashboard\" do\n    let(:repository) { FactoryBot.create(:repository) }\n    let!(:master_branch) { 
FactoryBot.create(:master_branch, repository: repository) }\n    let!(:non_master_branch) { FactoryBot.create(:branch, :name => 'feature-branch', repository: repository) }\n\n    it \"displays the build status of only the master branches\" do\n      get :dashboard\n      expect(response).to be_success\n      doc = Nokogiri::HTML(response.body)\n      elements = doc.css(\".projects .ci-build-info\")\n      expect(elements.size).to eq(1)\n    end\n  end\n\n  describe 'post /build-ref' do\n    let(:repository) { FactoryBot.create(:repository) }\n    let(:fake_sha) { to_40('1') }\n\n    it \"creates a master build with query string parameters\" do\n      post :build_ref, params: { id: repository.id, ref: 'master', sha: fake_sha }\n\n      verify_response_creates_build response, 'master', fake_sha\n    end\n\n    it \"creates a master build with payload\" do\n      post :build_ref, params: { id: repository.id, refChanges: [{refId: 'refs/heads/master', toHash: fake_sha}] }\n\n      verify_response_creates_build response, 'master', fake_sha\n    end\n\n    it \"creates a branch build with query string parameters\" do\n      post :build_ref, params: { id: repository.id, ref: 'blah', sha: fake_sha }\n\n      verify_response_creates_build response, 'blah', fake_sha\n    end\n\n    it \"creates a branch build with payload\" do\n      post :build_ref, params: { id: repository.id, refChanges: [{refId: 'refs/heads/blah', toHash: fake_sha}] }\n\n      verify_response_creates_build response, 'blah', fake_sha\n    end\n\n    it \"creates a branch build for a branch name with slashes\" do\n      post :build_ref, params: { id: repository.id, refChanges: [{refId: 'refs/heads/blah/with/a/slash', toHash: fake_sha}] }\n\n      verify_response_creates_build response, 'blah/with/a/slash', fake_sha\n    end\n\n    def verify_response_creates_build(response, branch_name, ref)\n      expect(response).to be_success\n      json       = JSON.parse(response.body)\n      build_hash = 
json['builds'][0]\n      build      = Build.find(build_hash['id'])\n\n      expect(build_hash['build_url']).not_to be_nil\n\n      expect(build.branch_record.name).to eq(branch_name)\n      expect(build.ref).to eq(ref)\n    end\n\n    context \"a convergence branch\" do\n      let(:branch) { FactoryBot.create(:convergence_branch, repository: repository) }\n\n      it \"should not abort previous in-progress builds\" do\n        earlier_build = FactoryBot.create(:build, state: 'runnable', branch_record: branch)\n\n        post :build_ref, params: { id: repository.id, ref: branch.name, sha: fake_sha }\n        expect(earlier_build.reload.state).to eq('runnable')\n      end\n    end\n\n    context \"not a convergence branch\" do\n      let(:branch) { FactoryBot.create(:branch, repository: repository) }\n\n      it \"should abort all previous in-progress builds\" do\n        earlier_build = FactoryBot.create(:build, state: 'runnable', branch_record: branch)\n\n        post :build_ref, params: { id: repository.id, ref: branch.name, sha: fake_sha }\n        expect(earlier_build.reload.state).to eq('aborted')\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/controllers/status_controller_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe StatusController do\n\n  describe \"#available\" do\n    context \"when the site is available\" do\n      it \"should return 200\" do\n        get :available\n        expect(response.code).to eq(\"200\")\n      end\n    end\n\n    context \"when the site is unavailable\" do\n      it \"should return 503\" do\n        expect(File).to receive(:exist?).and_return(true)\n        get :available\n        expect(response.code).to eq(\"503\")\n      end\n    end\n  end\n\nend\n"
  },
  {
    "path": "spec/decorators/branch_decorator_spec.rb",
    "content": "require 'spec_helper'\nrequire 'build'\n\ndescribe BranchDecorator do\n  describe \"#most_recent_build_state\" do\n    let(:branch) { instance_double(\"Branch\") }\n    let(:decorated_branch) { BranchDecorator.new(branch) }\n\n    context \"when at least one build is present\" do\n      before do\n        allow(branch).to receive(:most_recent_build) {\n          instance_double(\"Build\", state: 'running')\n        }\n      end\n\n      it \"returns the state of the most recent build\" do\n        expect(decorated_branch.most_recent_build_state).to eq('running')\n      end\n    end\n\n    context \"there are no builds for the branch\" do\n      before do\n        allow(branch).to receive(:most_recent_build).and_return(nil)\n      end\n\n      it \"returns 'unknown'\" do\n        expect(decorated_branch.most_recent_build_state).to eq('unknown')\n      end\n    end\n  end\n\n  describe \"#last_build_duration\" do\n    let(:branch) { instance_double(\"Branch\") }\n    let(:decorated_branch) { BranchDecorator.new(branch) }\n\n    context \"with a completed build\" do\n      before do\n        allow(branch).to receive(:last_completed_build) {\n          instance_double(\"Build\", state: 'succeeded', elapsed_time: 60)\n        }\n      end\n\n      it \"gets the duration of the last completed build\" do\n        expect(decorated_branch.last_build_duration).to eq(60)\n      end\n    end\n\n    context \"without a completed build\" do\n      before do\n        allow(branch).to receive(:last_completed_build).and_return(nil)\n      end\n\n      it \"returns nil\" do\n        expect(decorated_branch.last_build_duration).to be_nil\n      end\n    end\n  end\n\n  describe '#build_time_history' do\n    subject { decorated_branch.build_time_history }\n\n    let(:branch) do\n      proj = instance_double(\"Branch\")\n      allow(proj).to receive(:timing_data_for_recent_builds) {\n        [\n          @cucumber1 = [\"cucumber\", \"fb25a\", 55, 43, 0, 72550, 
\"succeeded\", \"2014-03-01 22:45:39 UTC\"],\n          @jasmine1 = [\"jasmine\",  \"fb25a\",  2,  0, 0, 72550, \"succeeded\", \"2014-03-01 22:45:39 UTC\"],\n          @rubocop1 = [\"rubocop\",  \"fb25a\",  3,  0, 0, 72550, \"succeeded\", \"2014-03-01 22:45:39 UTC\"],\n          @cucumber2 = [\"cucumber\", \"f4235\", 55, 44, 0, 72560, \"succeeded\", \"2014-03-02 00:37:55 UTC\"],\n          @jasmine2 = [\"jasmine\",  \"f4235\",  2,  0, 0, 72560, \"succeeded\", \"2014-03-02 00:37:55 UTC\"],\n          @rubocop2 = [\"rubocop\",  \"f4235\",  3,  0, 0, 72560, \"succeeded\", \"2014-03-02 00:37:55 UTC\"],\n        ]\n      }\n      proj\n    end\n    let(:decorated_branch) { BranchDecorator.new(branch) }\n\n    it \"should bucket the builds by type\" do\n      should == {\n        \"cucumber\" => [@cucumber1, @cucumber2],\n        \"jasmine\" => [@jasmine1, @jasmine2],\n        \"rubocop\" => [@rubocop1, @rubocop2],\n      }\n    end\n\n    context 'when the branch has never been built' do\n      let(:branch) { instance_double(\"Branch\", :timing_data_for_recent_builds => []) }\n\n      it { should == {} }\n    end\n\n    context 'when the some build types are missing from builds' do\n      let(:branch) do\n        proj = instance_double(\"Branch\")\n        allow(proj).to receive(:timing_data_for_recent_builds) {\n          [\n            @pants1 = [\"pants\", \"fb25a\", 55, 43, 0, 72550, \"succeeded\", \"2014-03-01 22:45:39 UTC\"],\n            @findbugs1 = [\"findbugs\",  \"fb25a\",  2,  0, 0, 72550, \"succeeded\", \"2014-03-01 22:45:39 UTC\"],\n            @pants2 = [\"pants\",  \"f4235\",  3,  0, 0, 72560, \"succeeded\", \"2014-03-02 00:37:55 UTC\"],\n            @findbugs2 = [\"findbugs\", \"f4235\", 55, 44, 0, 72560, \"succeeded\", \"2014-03-02 00:37:55 UTC\"],\n            @errorprone2 = [\"errorprone\",  \"f4235\",  2,  0, 0, 72560, \"succeeded\", \"2014-03-02 00:37:55 UTC\"],\n            @pants3 = [\"pants\",  \"ef570\",  3,  0, 0, 72568, \"succeeded\", 
\"2014-03-02 01:23:50 UTC\"],\n          ]\n        }\n        proj\n      end\n\n      it 'should sort the builds and add empty values for missing build parts' do\n        should == {\n          'pants' => [@pants1, @pants2, @pants3],\n          'findbugs' => [@findbugs1, @findbugs2, []],\n          'errorprone' => [[], @errorprone2, []],\n        }\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/decorators/build_part_decorator_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe BuildPartDecorator do\n  describe \"#most_recent_stdout_artifact\" do\n    let(:artifact) { FactoryBot.create(:build_artifact, :log_file => File.open(FIXTURE_PATH + file)) }\n    let(:build_attempt) { artifact.build_attempt }\n    let(:build_part) { BuildPartDecorator.new(build_attempt.build_part) }\n\n    subject { build_part.most_recent_stdout_artifact }\n\n    before do\n      FactoryBot.create(:build_artifact)\n    end\n\n    context \"stdout.log\" do\n      let(:file) { \"stdout.log\" }\n      it { should == artifact }\n    end\n\n    context \"stdout.log.gz\" do\n      let(:file) { \"stdout.log.gz\" }\n      it { should == artifact }\n    end\n\n    context \"not present\" do\n      let(:file) { \"build_artifact.log\" }\n      it { should be_nil }\n    end\n  end\nend\n"
  },
  {
    "path": "spec/features/integration_spec.rb",
    "content": "# coding: utf-8\nrequire \"spec_helper\"\n\nfeature \"viewing an in process build\" do\n  let(:repository) { FactoryBot.create(:repository) }\n  let(:branch) { FactoryBot.create(:master_branch, repository: repository) }\n  let(:build) { FactoryBot.create(:build, branch_record: branch) }\n  let(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :queue => 'ci') }\n  let!(:build_attempt) { FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'runnable') }\n\n  it \"view the current status of the build attempts\" do\n    build.update_attribute(:state, 'runnable')\n\n    visit('/')\n\n    expect(page).to have_content(repository.name)\n    expect(first(\".ci-build-info .state\")).to have_content(\"Runnable\")\n\n    click_link(repository.name)\n    expect(page).to have_content(build.ref[0, 5])\n    click_link(build.ref[0, 5])\n\n    within(\"table.build-summary\") do\n      expect(find(\"td:nth-child(1)\")).to have_content(build_part.id)\n      expect(find(\"td:nth-child(2)\")).to have_content(\"Runnable\")\n      expect(find(\"td:nth-child(4)\")).to have_content(\"test\")\n      click_link(build_part.id.to_s)\n    end\n\n    expect(find(\".subheader\")).to have_content(\"#{build.ref[0, 7]} – #{build_part.kind} (part #{build_part.id})\")\n\n    expect(all(\".build-part-info tbody tr\").size).to eq(1)\n  end\n\n  it \"should return to the home page when the logo is clicked\" do\n    # visit a deep page\n    visit repository_build_part_path(repository, build, build_part)\n    expect(page).to have_content(\"Runnable on ci queue\")\n\n    click_link(\"Home\")\n\n    expect(current_path).to eq(root_path)\n  end\nend\n\nfeature \"a failed build\" do\n  before :each do\n    @build_attempt = FactoryBot.create(:build_attempt, :state => 'failed')\n    @build_part = @build_attempt.build_part\n    allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil)\n  end\n\n  it \"can be rebuilt\" do\n    build_part_page = 
repository_build_part_path(@build_part.build_instance.repository, @build_part.build_instance, @build_part)\n    visit(build_part_page)\n    expect(all(\".build-part-info tbody tr\").size).to eq(1)\n    click_link(\"Rebuild\")\n    visit(build_part_page)\n    expect(all(\".build-part-info tbody tr\").size).to eq(2)\n  end\nend\n\nfeature \"requesting a new build of a branch\" do\n  before :each do\n    @repository = FactoryBot.create(:repository, url: \"git@github.com:square/kochiku.git\")\n    @branch_name = \"test/branch\"\n    @branch = FactoryBot.create(:branch, name: @branch_name, repository: @repository)\n    @branch_head_sha = \"4b41fe773057b2f1e2063eb94814d32699a34541\"\n\n    build_ref_info = <<RESPONSE\n{\n  \"ref\": \"refs/heads/#{@branch}\",\n  \"url\": \"#{@repository.base_api_url}/git/refs/heads/#{@branch_name}\",\n  \"object\": {\n    \"sha\": \"#{@branch_head_sha}\",\n    \"type\": \"commit\",\n    \"url\": \"#{@repository.base_api_url}/git/commits/#{@branch_head_sha}\"\n  }\n}\nRESPONSE\n    stub_request(:get, \"#{@repository.base_api_url}/git/refs/heads/#{@branch_name}\").to_return(:status => 200, :body => build_ref_info)\n  end\n\n  it \"creates a new build if a branch is given\" do\n    visit(repository_branch_path(@repository, @branch))\n    click_button('Build')\n    expect(page).to have_content(@branch_head_sha[0..4])\n    expect(find(\".flash.message\")).to have_content(\"New build started for 4b41fe773057b2f1e2063eb94814d32699a34541 on test/branch\")\n  end\nend\n"
  },
  {
    "path": "spec/fixtures/build_artifact.log",
    "content": "Stuff happened. It was awesome."
  },
  {
    "path": "spec/fixtures/sample_github_webhook_payload.json",
    "content": "{\n  \"before\": \"5aef35982fb2d34e9d9d4502f6ede1072793222d\",\n  \"repository\": {\n    \"url\": \"http://github.com/defunkt/github\",\n    \"name\": \"github\",\n    \"description\": \"You're lookin' at it.\",\n    \"watchers\": 5,\n    \"forks\": 2,\n    \"private\": 1,\n    \"owner\": {\n      \"email\": \"chris@ozmm.org\",\n      \"name\": \"defunkt\"\n    }\n  },\n  \"commits\": [\n    {\n      \"id\": \"41a212ee83ca127e3c8cf465891ab7216a705f59\",\n      \"url\": \"http://github.com/defunkt/github/commit/41a212ee83ca127e3c8cf465891ab7216a705f59\",\n      \"author\": {\n        \"email\": \"chris@ozmm.org\",\n        \"name\": \"Chris Wanstrath\"\n      },\n      \"message\": \"okay i give in\",\n      \"timestamp\": \"2008-02-15T14:57:17-08:00\",\n      \"added\": [\"filepath.rb\"]\n    },\n    {\n      \"id\": \"de8251ff97ee194a289832576287d6f8ad74e3d0\",\n      \"url\": \"http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0\",\n      \"author\": {\n        \"email\": \"chris@ozmm.org\",\n        \"name\": \"Chris Wanstrath\"\n      },\n      \"message\": \"update pricing a tad\",\n      \"timestamp\": \"2008-02-15T14:36:34-08:00\"\n    }\n  ],\n  \"after\": \"de8251ff97ee194a289832576287d6f8ad74e3d0\",\n  \"ref\": \"refs/heads/master\"\n}\n"
  },
  {
    "path": "spec/fixtures/stdout.log",
    "content": "This is stdout."
  },
  {
    "path": "spec/helpers/application_helper_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe ApplicationHelper do\n  include ActionView::Helpers\n  include Haml::Helpers\n\n  let(:repository) { FactoryBot.create(:repository, :url => \"git@git.example.com:square/web.git\") }\n  let(:branch) { FactoryBot.create(:branch, repository: repository, name: \"nomnomnom\") }\n\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      git.example.com:\n        type: github\n      stash.example.com:\n        type: stash\n    YAML\n    stub_const \"Settings\", settings\n  end\n\n  before do\n    @build = Build.new(ref: \"SHA1FORCOMMIT\", branch_record: branch)\n  end\n\n  describe \"#build_success_in_words\" do\n\n    it \"should return success when state = 'succeeded'\" do\n      @build.state = 'succeeded'\n      expect(build_success_in_words(@build)).to eq('success')\n    end\n\n    it \"should return failed when state = 'errored'\" do\n      @build.state = 'errored'\n      expect(build_success_in_words(@build)).to eq('failed')\n    end\n\n    it \"should return failed when state = 'doomed'\" do\n      @build.state = 'doomed'\n      expect(build_success_in_words(@build)).to eq('failed')\n    end\n\n    it \"should return state otherwise\" do\n      @build.state = 'partitioning'\n      expect(build_success_in_words(@build)).to eq('partitioning')\n    end\n  end\n\n  describe \"#link_to_commit\" do\n    it \"should create a link to the github url\" do\n      expect(link_to_commit(@build.repository, @build.ref)).to eq(%{<a href=\"#{show_link_to_commit(@build.repository, @build.ref)}\">SHA1FOR</a>})\n    end\n  end\n\n  describe \"#show_link_to_commit\" do\n    it \"should create a url to github based on config\" do\n      expect(show_link_to_commit(@build.repository, @build.ref)).to eq('https://git.example.com/square/web/commit/SHA1FORCOMMIT')\n    end\n  end\n\n  describe \"#show_link_to_compare\" do\n    let(:branch_stash) { FactoryBot.create(:branch, repository: repository_stash, name: 
\"okay\") }\n    let(:branch_stash_no_greenupdate) { FactoryBot.create(:branch, repository: repository_stash_no_greenupdate, name: \"okay\") }\n    let(:repository_stash) {\n      FactoryBot.create(:stash_repository, url: \"https://stash.example.com/scm/square/web2.git\",\n                                           host: \"stash.example.com\", namespace: \"square\",\n                                           on_green_update: \"green,red\")\n    }\n    let(:repository_stash_no_greenupdate) {\n      FactoryBot.create(:stash_repository, url: \"https://stash.example.com/scm/square/web3.git\",\n                                           host: \"stash.example.com\",\n                                           namespace: \"square\", on_green_update: \"\")\n    }\n\n    it \"creates a url to github showing the diff between 2 SHAs\" do\n      expect(show_link_to_compare(@build, 'SHA1FORCOMMIT', 'SHA2FORCOMMIT')).to eq('https://git.example.com/square/web/compare/SHA1FORCOMMIT...SHA2FORCOMMIT#files_bucket')\n    end\n\n    it \"creates a url to stash showing the diff between master and green branches\" do\n      build_stash = Build.new(ref: \"SHA1FORCOMMIT\", branch_record: branch_stash)\n      expect(show_link_to_compare(build_stash, 'SHA1FORCOMMIT', 'SHA2FORCOMMIT')).to eq('https://stash.example.com/projects/SQUARE/repos/web2/compare/commits?targetBranch=green&sourceBranch=refs%2Fheads%2Fmaster')\n    end\n\n    it \"creates a url to stash showing a comparison with master if no green branch set\" do\n      build_stash_no_greenupdate = Build.new(ref: \"SHA1FORCOMMIT\", branch_record: branch_stash_no_greenupdate)\n      expect(show_link_to_compare(build_stash_no_greenupdate, 'SHA1FORCOMMIT', 'SHA2FORCOMMIT')).to eq('https://stash.example.com/projects/SQUARE/repos/web3/compare/commits?targetBranch=refs%2Fheads%2Fmaster')\n    end\n  end\n\n  describe \"timeago\" do\n    it \"should generate the correct abbr tag\" do\n      timestamp = Time.at(0).utc\n      
expect(timeago(timestamp)).to eq(\n        %{<abbr class=\"timeago\" title=\"1970-01-01T00:00:00Z\">#{timestamp}</abbr>}\n      )\n      # the inner_html is not hardcoded because it is timezone dependent\n    end\n  end\nend\n"
  },
  {
    "path": "spec/helpers/build_helper_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe BuildHelper do\n  include ActionView::Helpers\n  include Haml::Helpers\n  let(:build) { FactoryBot.create(:build) }\n\n  describe \"#multiple_ruby_versions?\" do\n    context \"with a ruby build with multiple ruby versions\" do\n      let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :options => options) }\n      let!(:build_part2) { FactoryBot.create(:build_part, :build_instance => build, :options => options2) }\n      let(:options) { {\"ruby\" => \"1.9.3-p194\"} }\n      let(:options2) { {\"ruby\" => \"2.0\"} }\n\n      it \"returns true\" do\n        expect(multiple_ruby_versions?(build)).to equal(true)\n      end\n    end\n\n    context \"with a ruby build with only one ruby version\" do\n      let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :options => options) }\n      let(:options) { {\"ruby\" => \"1.9.3-p194\"} }\n\n      it \"returns false\" do\n        expect(multiple_ruby_versions?(build)).to equal(false)\n      end\n    end\n\n    context \"with a non-ruby build\" do\n      let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :options => options) }\n      let(:options) { {} }\n\n      it \"returns false\" do\n        expect(multiple_ruby_versions?(build)).to equal(false)\n      end\n    end\n  end\n\n  context \"with a ruby build with multiple ruby versions\" do\n    let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :options => options) }\n    let!(:build_part2) { FactoryBot.create(:build_part, :build_instance => build, :options => options2) }\n    let(:options) { {\"ruby\" => \"1.9.3-p194\"} }\n    let(:options2) { {\"ruby\" => \"2.0\"} }\n\n    it \"returns the ruby version info\" do\n      expect(build_metadata_headers(build, true)).to include(\"Ruby Version\")\n      expect(build_metadata_values(build, build_part, true)).to include(\"1.9.3-p194\")\n    end\n  end\n\n  context \"with a build only 
having one target\" do\n    let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :paths => ['a']) }\n    it \"returns the info\" do\n      expect(build_metadata_headers(build, false)).to eq([\"Target\"])\n      expect(build_metadata_values(build, build_part, false)).to include(\"a\")\n    end\n  end\n\n  context \"with a build with paths\" do\n    let!(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :paths => ['a', 'b']) }\n    it \"returns the info\" do\n      expect(build_metadata_headers(build, false)).to include(\"Paths\")\n      metadata_values = build_metadata_values(build, build_part, false).first\n\n      expect(metadata_values).to start_with(build_part.paths.size.to_s)\n\n      doc = Nokogiri::HTML(metadata_values)\n      node = doc.at_css('span')\n      expect(node['title']).to eq('a, b')\n      expect(node.inner_html).to eq(\"(<b class=\\\"root\\\">a</b>, b)\")\n    end\n  end\n\n  context \"with a build with multiple chunks\" do\n    let!(:build_part) {\n      FactoryBot.create(:build_part, :build_instance => build, :paths => ['a', 'b'],\n                                     :options => {'total_workers' => 5, 'worker_chunk' => 3})\n    }\n\n    it \"displays worker chunk in paths\" do\n      expect(format_paths(build_part)).to eq(\"a - Chunk 3 of 5\")\n    end\n  end\n\n  context \"a build part with no paths (/dev/null)\" do\n    let!(:build_part) { FactoryBot.create(:build_part, build_instance: build, paths: ['/dev/null'], kind: 'lint-check') }\n\n    it \"displays the BuildPart kind instead of /dev/null\" do\n      expect(format_paths(build_part)).to eq(\"lint-check\")\n    end\n  end\nend\n"
  },
  {
    "path": "spec/helpers/project_stats_helper_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe ProjectStatsHelper do\n  before do\n    @builds = []\n  end\n\n  def create_some_builds_with_build_attempts(count)\n    count.times do\n      ba = FactoryBot.create(:build_attempt, :state => 'passed')\n      ba.build_instance.update_state_from_parts!\n      @builds << ba.build_instance\n    end\n  end\n\n  describe 'error_free_pass_rate' do\n\n    subject { helper.error_free_pass_rate(@builds) }\n\n    context \"when all attempts passed\" do\n      before { create_some_builds_with_build_attempts(3) }\n      it { should == '100%' }\n    end\n\n    context \"when some parts failed before passing\" do\n      before do\n        create_some_builds_with_build_attempts(3)\n        FactoryBot.create(:build_attempt, :state => 'failed', :build_part => @builds.first.build_parts.first)\n      end\n      it { should == '67%' }\n    end\n\n    context \"when not all parts ever passed\" do\n      before do\n        create_some_builds_with_build_attempts(2)\n        @builds.first.update_attributes! 
:state => 'failed'\n      end\n      it { should == '50%' }\n    end\n\n    context \"when the latest build part is running\" do\n      before do\n        create_some_builds_with_build_attempts(3)\n        @builds.last.update_attribute(:state, 'running')\n      end\n\n      it \"should not count running build\" do\n        expect(subject).to eq('100%')\n      end\n    end\n  end\n\n  describe 'eventual_pass_rate' do\n\n    subject { helper.eventual_pass_rate(@builds) }\n\n    context \"when all attempts passed\" do\n      before { create_some_builds_with_build_attempts(2) }\n      it { should == '100%' }\n    end\n\n    context \"when some parts failed before passing\" do\n      before do\n        create_some_builds_with_build_attempts(1)\n\n        failed_first = FactoryBot.create(:build_attempt, :state => 'failed')\n        FactoryBot.create(:build_attempt, :state => 'passed', :build_part => failed_first.build_part)\n        failed_first.build_instance.update_state_from_parts!\n        @builds << failed_first.build_instance\n      end\n      it { should == '100%' }\n    end\n\n    context \"when not all parts ever passed\" do\n      before do\n        never_passed = FactoryBot.create(:build_attempt, :state => 'failed')\n        FactoryBot.create(:build_attempt, :state => 'failed', :build_part => never_passed.build_part)\n        never_passed.build_instance.update_state_from_parts!\n        @builds << never_passed.build_instance\n\n        create_some_builds_with_build_attempts(1)\n      end\n      it { should == '50%' }\n    end\n  end\n\n  describe 'pass_rate_text' do\n    subject { helper.pass_rate_text(number) }\n\n    context \"for a perfect score\" do\n      let(:number) { 1.000000 }\n      it { should == '100%' }\n    end\n\n    context \"for a zero score\" do\n      let(:number) { 0 }\n      it { should == '0%' }\n    end\n\n    context \"for a middlin' score\" do\n      let(:number) { 0.421643 }\n      it { should == '42%' }\n    end\n  end\n\n  describe 
'average_number_of_rebuilds' do\n    subject { helper.average_number_of_rebuilds(@builds) }\n\n    before do\n      # setup test with successful two builds containing varying build attempts\n      ba = FactoryBot.create(:build_attempt, :state => 'errored')\n      FactoryBot.create(:build_attempt, :state => 'failed', :build_part => ba.build_part)\n      FactoryBot.create(:build_attempt, :state => 'passed', :build_part => ba.build_part)\n      ba.build_instance.update_state_from_parts!\n      @builds << ba.build_instance\n\n      ba = FactoryBot.create(:build_attempt, :state => 'failed')\n      FactoryBot.create(:build_attempt, :state => 'passed', :build_part => ba.build_part)\n      ba.build_instance.update_state_from_parts!\n      @builds << ba.build_instance\n    end\n\n    it { should == 1.5 }\n\n    context 'when there is an unsuccessful build' do\n      before do\n        ba = FactoryBot.create(:build_attempt, :state => 'errored')\n        ba.build_instance.update_state_from_parts!\n        @builds << ba.build_instance\n      end\n\n      it 'should not impact the result' do\n        should == 1.5\n      end\n    end\n  end\n\n  describe 'median_elapsed_time' do\n    subject { helper.median_elapsed_time(@builds) }\n\n    before do\n      5.times do |i|\n        @builds << build = FactoryBot.create(:build, :state => 'succeeded', :created_at => (10 + 5 * i).minutes.ago)\n        build_part = FactoryBot.create(:build_part, :build_instance => build)\n        FactoryBot.create(:build_attempt, :build_part => build_part, :finished_at => build.created_at + (3 * i).minutes)\n      end\n    end\n\n    it { should be_within(1).of(6 * 60) }\n\n    context 'when there is an unsuccessful build' do\n      before do\n        ba = FactoryBot.create(:build_attempt, :state => 'errored')\n        ba.build_instance.update_state_from_parts!\n        @builds << ba.build_instance\n      end\n\n      it 'should not impact the result' do\n        should be_within(1).of(6 * 60)\n      
end\n    end\n\n    context 'when there is an even number of builds' do\n      before do\n        @builds << build = FactoryBot.create(:build, :state => 'succeeded', :created_at => 45.minutes.ago)\n        build_part = FactoryBot.create(:build_part, :build_instance => build)\n        FactoryBot.create(:build_attempt, :build_part => build_part, :finished_at => build.created_at + 17.minutes)\n      end\n\n      it 'should average the middle two' do\n        should be_within(1).of((6 + 9) * 30)\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/jobs/build_partitioning_job_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe BuildPartitioningJob do\n\n  describe \"#perform\" do\n    subject { BuildPartitioningJob.perform(id) }\n    let(:id) { build.id }\n    let(:build) { FactoryBot.create(:build, :state => 'runnable') }\n    before do\n      allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil)\n    end\n\n    context \"with a job runs successfully\" do\n      before do\n        allow(GitRepo).to receive(:inside_copy).and_yield\n        allow(Build).to receive(:find).with(id).and_return(build)\n        allow(Partitioner).to receive(:for_build).with(build).and_return(partitioner)\n        allow(partitioner).to receive(:partitions).and_return('PARTITIONS')\n        allow(build).to receive(:partition).with('PARTITIONS')\n      end\n\n      let(:partitioner) { double }\n\n      it \"uses the partitioner to partition the build\" do\n        allow(build).to receive(:update_commit_status!)\n        expect(partitioner).to receive(:partitions).and_return('PARTITIONS')\n        expect(build).to receive(:partition).with('PARTITIONS')\n\n        subject\n      end\n\n      it \"with a pull request marks a build as pending\" do\n        expect(GithubRequest).to receive(:post)\n          .with(\"#{build.repository.base_api_url}/statuses/#{build.ref}\",\n                hash_including(:state => 'pending'),\n                anything\n               )\n\n        subject\n      end\n    end\n\n    context \"no test_command specified\" do\n      before do\n        build.repository.update!(test_command: nil)\n      end\n\n      it \"raises an error and fails build\" do\n        expect(GithubRequest).to receive(:post)\n          .with(\"#{build.repository.base_api_url}/statuses/#{build.ref}\",\n                hash_including(:state => 'failure'),\n                anything\n               )\n        subject\n        build.reload\n        expect(build.error_details[:message]).to include(\"No test_command\")\n        expect(build.build_parts.size).to 
eq(0)\n      end\n    end\n\n    context \"when a non-retryable error occurs\" do\n      error_message = \"A name error occurred\"\n      before { allow(GitRepo).to receive(:load_kochiku_yml).and_raise(NameError.new(error_message)) }\n\n      it \"should re-raise the error and set the build state to errored\" do\n        expect(GithubRequest).to receive(:post)\n          .with(\"#{build.repository.base_api_url}/statuses/#{build.ref}\",\n                hash_including(:state => 'failure'),\n                anything\n               )\n        expect { subject }.to raise_error(NameError)\n        build.reload\n        expect(build.state).to eq('errored')\n        expect(build.error_details[:message]).to eq(error_message)\n        expect(build.error_details[:backtrace]).not_to be_blank\n      end\n    end\n\n    context \"when a retryable error occurs\" do\n      before { allow(GitRepo).to receive(:load_kochiku_yml).and_raise(GitRepo::RefNotFoundError) }\n\n      it \"should re-raise the error and set the build state to waiting for sync\" do\n        expect { subject }.to raise_error(GitRepo::RefNotFoundError)\n        expect(build.reload.state).to eq('waiting_for_sync')\n      end\n    end\n\n    it \"should have an on_failure_retry hook that will re-enqueue the job if it it gets a git ref not found error\" do\n      expect(Resque).to receive(:enqueue_in).with(60, BuildPartitioningJob, id)\n      BuildPartitioningJob.on_failure_retry(GitRepo::RefNotFoundError.new, id)\n    end\n  end\nend\n"
  },
  {
    "path": "spec/jobs/build_state_update_job_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe BuildStateUpdateJob do\n  let(:repository) { FactoryBot.create(:repository, url: 'git@github.com:square/test-repo.git') }\n  let(:branch) { FactoryBot.create(:branch, :repository => repository) }\n  let(:build) { FactoryBot.create(:build, :state => 'runnable', :branch_record => branch) }\n  let(:name) { repository.name + \"_pull_requests\" }\n  let(:current_repo_master) { build.ref }\n\n  before do\n    build.build_parts.create!(:kind => :spec, :paths => [\"foo\", \"bar\"], :queue => 'ci')\n    build.build_parts.create!(:kind => :cucumber, :paths => [\"baz\"], :queue => 'ci')\n    # TODO: This is terrible, need to fold this feedback back into the design.\n    # We are stubbing methods that are not called from the class under test.\n    allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil)\n    allow(GitRepo).to receive(:harmonize_remote_url)\n    allow(GitRepo).to receive(:synchronize_with_remote).and_return(true)\n    allow(GitRepo).to receive(:inside_repo).and_yield\n    mocked_remote_server = RemoteServer.for_url(repository.url)\n    allow(mocked_remote_server).to receive(:sha_for_branch).and_return(current_repo_master)\n    allow(RemoteServer).to receive(:for_url).with(repository.url).and_return(mocked_remote_server)\n    allow(GitBlame).to receive(:last_email_in_branch).and_return(\"example@email.com\")\n    allow(BuildStrategy).to receive(:update_branch)\n    allow(GithubRequest).to receive(:post)\n  end\n\n  shared_examples \"a non promotable state\" do\n    it \"should not promote the build\" do\n      expect(BuildStrategy).not_to receive(:promote_build)\n      BuildStateUpdateJob.perform(build.id)\n    end\n  end\n\n  describe \"#perform\" do\n    it \"updates github when a build passes\" do\n      expect(GithubRequest).to receive(:post)\n        .with(%r|/statuses/#{build.ref}|,\n              hash_including(:state => 'pending'),\n              anything\n             )\n\n      
BuildStateUpdateJob.perform(build.id)\n\n      build.build_parts.each do |part|\n        build_attempt = part.build_attempts.create!(:state => 'running')\n        build_attempt.finish!('passed')\n      end\n\n      expect(GithubRequest).to receive(:post)\n        .with(%r|/statuses/#{build.ref}|,\n              hash_including(:state => 'success'),\n              anything\n             )\n\n      BuildStateUpdateJob.perform(build.id)\n    end\n\n    context \"when a build part is still in progress\" do\n      it \"does not kick off a new build unless finished\" do\n        (build.build_parts - [build.build_parts.last]).each do |part|\n          part.build_attempts.create!(state: 'passed')\n        end\n        build.build_parts.last.build_attempts.create!(state: 'running')\n        build.update_state_from_parts!\n\n        expect {\n          BuildStateUpdateJob.perform(build.id)\n        }.to_not change(branch.builds, :count)\n      end\n    end\n\n    context \"when all parts have passed\" do\n      before do\n        build.build_parts.each do |part|\n          part.build_attempts.create!(state: 'passed')\n        end\n        build.update_state_from_parts!\n      end\n\n      describe \"checking for a new commit after finish\" do\n        subject { BuildStateUpdateJob.perform(build.id) }\n\n        it \"doesn't kick off a new build for non convergence branch\" do\n          expect(branch.convergence?).to be(false)\n          expect { subject }.to_not change(branch.builds, :count)\n        end\n\n        context \"with a build on a convergence branch\" do\n          let(:branch) { FactoryBot.create(:convergence_branch, repository: repository) }\n\n          it \"should promote the build\" do\n            expect(BuildStrategy).to receive(:promote_build).with(build)\n            expect(BuildStrategy).not_to receive(:run_success_script)\n            subject\n          end\n\n          context \"new sha is available\" do\n            let(:current_repo_master) { 
\"new-sha-11111111111111111111111111111111\" }\n\n            it \"builds when there is a new sha to build\" do\n              expect { subject }.to change(branch.builds, :count).by(1)\n              last_build = branch.builds.last\n              expect(last_build.ref).to eq(current_repo_master)\n            end\n\n            it \"kicks off a new build if attempts are running on a part that passed\" do\n              build.build_parts.first.create_and_enqueue_new_build_attempt!\n              expect { subject }.to change(branch.builds, :count).by(1)\n              last_build = branch.builds.last\n              expect(last_build.ref).to eq(current_repo_master)\n            end\n\n            it \"does not kick off a new build if one is already running\" do\n              branch.builds.create!(ref: 'some-other-sha-1111111111111111111111111', state: 'partitioning')\n              expect { subject }.to_not change(branch.builds, :count)\n            end\n\n            it \"does not roll back a build's state\" do\n              new_build = branch.builds.create!(ref: current_repo_master, state: 'failed')\n              expect { subject }.to_not change(branch.builds, :count)\n              expect(new_build.reload.state).to eq('failed')\n            end\n          end\n\n          context \"no new sha\" do\n            it \"does not build\" do\n              expect { subject }.to_not change(branch.builds, :count)\n            end\n          end\n        end\n      end\n\n      it \"kochiku should merge the branch if eligible\" do\n        build.update!(merge_on_success: true)\n        expect(BuildStrategy).to receive(:merge_ref).with(build)\n        BuildStateUpdateJob.perform(build.id)\n      end\n    end\n\n    context \"when there is a success script\" do\n      let(:build) { FactoryBot.create(:build, state: 'succeeded', branch_record: branch) }\n\n      before do\n        kochiku_yaml_config = { 'on_success_script' => 'echo hip hip hooray' }\n        allow(GitRepo).to 
receive(:load_kochiku_yml).and_return(kochiku_yaml_config)\n      end\n\n      it \"runs the success script\" do\n        expect(BuildStrategy).to receive(:run_success_script)\n        BuildStateUpdateJob.perform(build.id)\n      end\n\n      context \"when the success script has been run\" do\n        before do\n          build.on_success_script_log_file = FilelessIO.new(\"test\").tap { |fio| fio.original_filename = \"bar.txt\" }\n          build.save!\n        end\n\n        it \"does not run the success script\" do\n          expect(BuildStrategy).to_not receive(:run_success_script)\n          BuildStateUpdateJob.perform(build.id)\n        end\n      end\n    end\n\n    context \"where this is no success script\" do\n      let(:build) { FactoryBot.create(:build, state: 'succeeded', branch_record: branch) }\n\n      before do\n        kochiku_yaml_config = { }\n        allow(GitRepo).to receive(:load_kochiku_yml).and_return(kochiku_yaml_config)\n      end\n\n      it \"does not try to execute a success script\" do\n        expect(BuildStrategy).to_not receive(:run_success_script)\n        BuildStateUpdateJob.perform(build.id)\n      end\n    end\n\n    context \"when a part has failed but some are still running\" do\n      before do\n        build.build_parts.first.build_attempts.create!(:state => 'failed')\n        build.update_state_from_parts!\n      end\n\n      it_behaves_like \"a non promotable state\"\n    end\n\n    context \"when all parts have run and some have failed\" do\n      before do\n        (build.build_parts - [build.build_parts.first]).each do |part|\n          ba = part.build_attempts.create!(:state => 'passed')\n          FactoryBot.create(:stdout_build_artifact, build_attempt: ba)\n        end\n\n        failed_build_attempt = build.build_parts.first.build_attempts.create!(:state => 'failed')\n        FactoryBot.create(:stdout_build_artifact, build_attempt: failed_build_attempt)\n\n        build.update_state_from_parts!\n      end\n\n      
it_behaves_like \"a non promotable state\"\n    end\n\n    context \"when no parts\" do\n      before do\n        build.build_parts.destroy_all\n      end\n\n      it \"should not update the state\" do\n        expect {\n          BuildStateUpdateJob.perform(build.id)\n        }.to_not change { build.reload.state }\n      end\n\n      it_behaves_like \"a non promotable state\"\n    end\n  end\nend\n"
  },
  {
    "path": "spec/jobs/enforce_timeouts_job_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe EnforceTimeoutsJob do\n  let(:repo_timeout) { 10 } # minutes\n  let(:repository) { FactoryBot.create(:repository, :url => 'git@github.com:square/test-repo.git', :timeout => repo_timeout) }\n  let(:branch) { FactoryBot.create(:branch, :repository => repository) }\n  let(:build) { FactoryBot.create(:build, :state => 'runnable', :branch_record => branch) }\n  let(:build_part) { FactoryBot.create(:build_part, :build_instance => build, :kind => :cucumber, :paths => ['baz'], :queue => 'ci') }\n\n  subject { EnforceTimeoutsJob.perform }\n\n  before do\n    # Stub needed to test rebuild feature\n    allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil)\n  end\n\n  it 'should mark timed-out builds as errored' do\n    attempt1 = BuildAttempt.create(build_part_id: build_part.id, started_at: (repo_timeout + 7).minutes.ago,\n                                   state: 'running', builder: 'test-worker')\n    attempt2 = BuildAttempt.create(build_part_id: build_part.id, started_at: (repo_timeout + 2).minutes.ago,\n                                   state: 'running', builder: 'test-worker')\n    subject\n    expect(attempt1.reload.state).to eq('errored')\n    expect(attempt2.reload.state).to eq('running')\n  end\n\n  describe \"automatic rebuilds\" do\n    before do\n      @overdue_ba = BuildAttempt.create(build_part_id: build_part.id, started_at: (repo_timeout + 7).minutes.ago,\n                                        state: 'running', builder: 'test-worker')\n    end\n\n    context \"the aborted build attempt is the most recent attempt on the BuildPart\" do\n      it \"should rebuild\" do\n        expect(build_part.build_attempts.last).to eq(@overdue_ba)\n\n        subject\n        build_part.reload\n        expect(build_part.build_attempts.last).to_not eq(@overdue_ba)\n      end\n    end\n\n    context \"the aborted build attempt is not the most recent attempt on the BuildPart\" do\n      before do\n        
BuildAttempt.create(build_part_id: build_part.id, started_at: 2.minutes.ago,\n                            state: 'running', builder: 'test-worker')\n      end\n      it \"should not rebuild\" do\n        expect {\n          subject\n        }.to_not change { build_part.build_attempts.count }\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/jobs/poll_repositories_job_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe PollRepositoriesJob do\n\n  subject { PollRepositoriesJob.perform }\n\n  let(:repo) { branch.repository }\n  let!(:branch) { FactoryBot.create(:convergence_branch) }\n\n  before do\n    allow(described_class).to receive(:sleep).and_return(nil)\n\n    @fake_remote_server = double(:sha_for_branch => to_40(\"test_sha\"))\n    allow(RemoteServer).to receive(:for_url).with(repo.url).and_return(@fake_remote_server)\n  end\n\n  it \"will build any new commit\" do\n    subject\n    expect(Build.exists?(:ref => to_40(\"test_sha\"), :branch_id => branch.id)).to be(true)\n  end\n\n  it \"won't build an old commit\" do\n    FactoryBot.create(:build, :branch_record => branch, :ref => to_40(\"test_sha\"))\n    expect { subject }.to_not change{ branch.reload.builds.count }\n  end\n\n  # this likely means the repo has moved/renamed and the url needs to be\n  # updated or has been deleted from the git server\n  it \"disables the repo in Kochiku if the RemoteServer returns a 404\" do\n    allow(@fake_remote_server).to receive(:sha_for_branch).and_raise(RemoteServer::RefDoesNotExist)\n    expect { subject }.to change { repo.reload.enabled? }.from(true).to(false)\n  end\nend\n"
  },
  {
    "path": "spec/jobs/timeout_stuck_builds_job_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe TimeoutStuckBuildsJob do\n  let(:repository) { FactoryBot.create(:repository, url: 'git@github.com:square/test-repo.git', assume_lost_after: 10) }\n  let(:branch) { FactoryBot.create(:branch, :repository => repository) }\n  let(:build) { FactoryBot.create(:build, :state => 'runnable', :branch_record => branch) }\n\n  subject { TimeoutStuckBuildsJob.perform }\n\n  describe \"#perform\" do\n    let(:build_attempt) {\n      build.build_parts.create!(:kind => :spec, :paths => [\"foo\", \"bar\"], :queue => 'ci')\n           .build_attempts.create!(:state => 'running')\n    }\n    let(:build_attempt_2) {\n      build.build_parts.create!(:kind => :cucumber, :paths => [\"baz\"], :queue => 'ci')\n           .build_attempts.create!(:state => 'running')\n    }\n    context \"when a repository has assume_lost_after set\" do\n      it \"should not stop builds that have yet to reach the limit\" do\n        subject\n        expect(build_attempt.reload.state).to eq('running')\n        expect(build_attempt_2.reload.state).to eq('running')\n      end\n\n      it \"should stop builds that have reached the limit\" do\n        expect(build_attempt.state).to eq('running')\n        build_attempt.update_attributes(started_at: 30.minutes.ago)\n        subject\n        expect(build_attempt.reload.state).to eq('errored')\n        expect(build_attempt_2.reload.state).to eq('running')\n      end\n    end\n\n    context \"when a build attempt was created more then 5 minutes ago\" do\n      let(:build_attempt) {\n        build.build_parts.create!(:kind => :cucumber, :paths => [\"baz\"], :queue => 'ci')\n             .build_attempts.create!(:created_at => 10.minutes.ago, :state => 'runnable', :builder => \"test\")\n      }\n\n      it \"should stop a build that is not queued\" do\n        expect(build_attempt.state).to eq('runnable')\n        subject\n        expect(build_attempt.reload.state).to eq('errored')\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/lib/build_strategies/production_build_strategy_spec.rb",
    "content": "require \"spec_helper\"\n# Including the production strategy is potentially dangerous but we stub out command execution.\nrequire \"#{Rails.root}/lib/build_strategies/production_build_strategy.rb\"\n\ndescribe BuildStrategy do\n  let(:branch) { FactoryBot.create(:branch, name: 'funyuns') }\n  let(:build) { FactoryBot.create(:build, branch_record: branch) }\n\n  before(:each) do\n    CommandStubber.new # ensure Open3 is stubbed\n\n    expect(Rails.application.config.action_mailer.delivery_method).to eq(:test)\n  end\n\n  describe \"#merge_ref\" do\n    before do\n      allow(GitRepo).to receive(:inside_copy).and_yield\n    end\n\n    context \"when auto_merge is enabled\" do\n      before do\n        expect(GitBlame).to receive(:emails_in_branch).with(an_instance_of(Build)).and_return(\"the-committers@example.com\")\n      end\n\n      context \"Using a github build\" do\n        it \"should merge to master\" do\n          merger = object_double(GitMergeExecutor.new(build))\n          expect(GitMergeExecutor).to receive(:new).and_return(merger)\n          expect(merger).to receive(:merge_and_push).and_return(merge_commit: to_40('a'), log_output: \"This is not a drill\")\n          expect(merger).to receive(:delete_branch)\n          expect { BuildStrategy.merge_ref(build) }.not_to raise_error\n        end\n\n        it \"should handle merge failure\" do\n          merger = object_double(GitMergeExecutor.new(build))\n          expect(GitMergeExecutor).to receive(:new).and_return(merger)\n          expect(merger).to receive(:merge_and_push).and_raise(GitMergeExecutor::GitMergeFailedError)\n\n          expect(MergeMailer).to receive(:merge_failed).once\n            .and_return(double('mailer', :deliver_now => nil))\n          expect { BuildStrategy.merge_ref(build) }.to_not raise_error\n        end\n      end\n    end\n\n    context \"Using a stash build\" do\n      let(:stash_branch) { FactoryBot.create(:branch, repository: 
FactoryBot.create(:stash_repository)) }\n      let(:stash_build) { FactoryBot.create(:build, branch_record: stash_branch) }\n\n      before do\n        settings = SettingsAccessor.new(<<-YAML)\n          sender_email_address: kochiku@example.com\n          kochiku_notifications_email_address: test@example.com\n          git_servers:\n            github.com:\n              type: github\n            stash.example.com:\n              type: stash\n          YAML\n        stub_const \"Settings\", settings\n      end\n\n      it \"should merge to master using stash REST api\" do\n        merger = object_double(GitMergeExecutor.new(stash_build))\n        expect(GitMergeExecutor).to receive(:new).and_return(merger)\n        expect(merger).to receive(:merge_and_push).and_return(merge_commit: to_40('a'), log_output: \"This is not a drill\")\n        expect(merger).to receive(:delete_branch)\n\n        expect { BuildStrategy.merge_ref(build) }.not_to raise_error\n      end\n    end\n\n  end\n\n  describe \"#promote_build\" do\n    subject { described_class.promote_build(build) }\n\n    before do\n      allow(GitRepo).to receive(:inside_repo).and_yield\n    end\n\n    context \"when the ref is an ancestor\" do\n      before(:each) {\n        expect(GitRepo).to receive(:included_in_promotion_ref?).and_return(true)\n      }\n      it \"does not perform an update\" do\n        expect(described_class).to_not receive(:update_branch)\n        subject\n      end\n    end\n\n    context \"when the ref is not an ancestor\" do\n      before(:each) {\n        expect(GitRepo).to receive(:included_in_promotion_ref?).and_return(false)\n      }\n      it \"should update the promotion branch\" do\n        expect(described_class).to receive(:update_branch).with(branch.repository.promotion_refs.first, build.ref)\n        subject\n      end\n    end\n  end\n\n  describe \"#update_branch\" do\n    subject {\n      described_class.update_branch('last-green', 'abc123')\n    }\n\n    it \"should 
promote a sha\" do\n      mock_git_command = double\n      expect(mock_git_command).to receive(:run).and_return \"\"\n      expect(Cocaine::CommandLine).to receive(:new).with(\"git push\", \"--force origin abc123:refs/heads/last-green\").and_return mock_git_command\n\n      subject\n    end\n  end\n\n  describe \"#run_success_script\" do\n    let(:repository) { branch.repository }\n    subject {\n      described_class.run_success_script(build)\n    }\n\n    before do\n      allow(GitRepo).to receive(:inside_copy).and_yield\n      expect(build).to receive(:on_success_script).and_return(\"./this_is_a_triumph\")\n    end\n\n    it \"run success script only once\" do\n      command = double(\"Cocaine::CommandLine\", :run => \"this is some output\\n\", :exit_status => \"255\")\n      allow(Cocaine::CommandLine).to receive(:new).and_return(command)\n      subject\n      expect(build.reload.on_success_script_log_file.read).to eq(\"this is some output\\n\\nExited with status: 255\")\n    end\n  end\n\n  describe \"#on_success_command\" do\n    let(:repository) { branch.repository }\n\n    before do\n      allow(GitRepo).to receive(:inside_copy).and_yield\n      expect(build).to receive(:on_success_script).and_return(\"./this_is_a_triumph\")\n    end\n\n    it \"sets GIT_BRANCH and GIT_COMMIT\" do\n      command = described_class.on_success_command(build)\n      expect(command).to include(\"./this_is_a_triumph\")\n      expect(command).to include(\"GIT_BRANCH=\")\n      expect(command).to include(\"GIT_COMMIT=\")\n    end\n  end\nend\n"
  },
  {
    "path": "spec/lib/git_blame_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe GitBlame do\n  let(:build) { FactoryBot.create(:build) }\n\n  describe \"#emails_since_last_green\" do\n    subject { GitBlame.emails_since_last_green(build) }\n\n    context \"with many build breakers, and no git prefix\" do\n      before do\n        allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return([\"User One:userone@example.com\", \"User Two:usertwo@example.com\"])\n      end\n\n      it \"returns the emails of the users\" do\n        expect(subject).to eq([\"userone@example.com\", \"usertwo@example.com\"])\n      end\n\n      it \"will not return the same user twice\" do\n        allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return([\"User One:userone@example.com\", \"User One:userone@example.com\"])\n        expect(subject).to eq([\"userone@example.com\"])\n      end\n    end\n\n    context \"with a git prefix\" do\n      before do\n        allow(Settings).to receive(:git_pair_email_prefix).and_return(\"git\")\n      end\n\n      it \"should be able to extract a single user\" do\n        allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return([\"First Last:git+userone@example.com\"])\n        expect(subject).to eq([\"userone@example.com\"])\n      end\n\n      it \"should be able to extract multiple users\" do\n        allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return([\"First Last:git+one+two+three@example.com\"])\n        expect(subject).to eq([\"one@example.com\", \"two@example.com\", \"three@example.com\"])\n      end\n\n      it \"does not affect users with no plus sign\" do\n        allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return([\"One:one@example.com\", \"Two:two@foo.example.org\"])\n        expect(subject).to eq([\"one@example.com\", \"two@foo.example.org\"])\n      end\n\n      it \"does not affect an email with a similar format but not starting with the prefix and a 
plus sign\" do\n        allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return([\"One:github+one+two@example.com\"])\n        expect(subject).to eq([\"github+one+two@example.com\"])\n      end\n    end\n  end\n\n  describe \"#emails_in_branch\" do\n    subject { GitBlame.emails_in_branch(build) }\n\n    after do\n      GitBlame.instance_variable_set(:@people_lookup, nil)\n    end\n\n    context \"with many build breakers\" do\n      before do\n        allow(GitBlame).to receive(:git_names_and_emails_in_branch).and_return([\"User One:userone@example.com\", \"User Two:usertwo@example.com\"])\n      end\n\n      it \"returns the emails of the users\" do\n        expect(subject).to eq([\"userone@example.com\", \"usertwo@example.com\"])\n      end\n    end\n\n    context \"with deleted branch\" do\n      before do\n        allow(GitBlame).to receive(:git_names_and_emails_in_branch).and_call_original\n        allow(GitRepo).to receive(:inside_repo).and_yield\n        allow(GitRepo).to receive(:branch_exist?).and_return(false)\n      end\n\n      it \"should should return []\" do\n        expect(subject).to eq([])\n      end\n    end\n  end\n\n  describe \"#last_email_in_branch\" do\n    subject { GitBlame.last_email_in_branch(build) }\n\n    before do\n      allow(GitBlame).to receive(:last_git_name_and_email_in_branch).and_return(\"User One:userone@example.com\")\n    end\n\n    it \"returns a single email\" do\n      expect(subject).to eq([\"userone@example.com\"])\n    end\n  end\n\n  describe \"#changes_since_last_green\" do\n    subject { GitBlame.changes_since_last_green(build) }\n\n    before do\n      allow(GitBlame).to receive(:changes_since_last_green).and_call_original\n    end\n\n    it \"should parse the git log message and return a hash of information\" do\n      allow(GitRepo).to receive(:inside_repo).and_return(\"::!::817b88be7488cab5e4f9d9975222db80d8bceb3b|User One <github+uo@squareup.com>|Fri Oct 19 17:43:47 2012 -0700|this is my 
commit message::!::\")\n      git_changes = subject\n      expect(git_changes.first[:hash]).to eq(\"817b88be7488cab5e4f9d9975222db80d8bceb3b\")\n      expect(git_changes.first[:author]).to eq(\"User One <github+uo@squareup.com>\")\n      expect(git_changes.first[:date]).to eq(\"Fri Oct 19 17:43:47 2012 -0700\")\n      expect(git_changes.first[:message]).to eq(\"this is my commit message\")\n    end\n\n    it \"should strip new lines in the commit message\" do\n      allow(GitRepo).to receive(:inside_repo).and_return(\"::!::817b88|User One|Fri Oct 19|this is my commit message\\nanother line::!::\")\n      git_changes = subject\n      expect(git_changes.first[:message]).to eq(\"this is my commit message another line\")\n    end\n  end\n\n  describe \"#changes_in_branch\" do\n    subject { GitBlame.changes_in_branch(build) }\n\n    before do\n      allow(GitBlame).to receive(:changes_in_branch).and_call_original\n    end\n\n    it \"should parse the git log message and return a hash of information\" do\n      allow(GitRepo).to receive(:inside_repo).and_return(\"::!::817b88be7488cab5e4f9d9975222db80d8bceb3b|User One <github+uo@squareup.com>|Fri Oct 19 17:43:47 2012 -0700|this is my commit message::!::\")\n      git_changes = subject\n      expect(git_changes.first[:hash]).to eq(\"817b88be7488cab5e4f9d9975222db80d8bceb3b\")\n      expect(git_changes.first[:author]).to eq(\"User One <github+uo@squareup.com>\")\n      expect(git_changes.first[:date]).to eq(\"Fri Oct 19 17:43:47 2012 -0700\")\n      expect(git_changes.first[:message]).to eq(\"this is my commit message\")\n    end\n  end\n\n  describe \"#files_changed_since_last_build\" do\n    let(:options) { {} }\n    subject { GitBlame.files_changed_since_last_build(build, options) }\n\n    before do\n      allow(GitBlame).to receive(:files_changed_since_last_build).and_call_original\n    end\n\n    it \"should parse the git log and return change file paths\" do\n      allow(GitRepo).to 
receive(:inside_repo).and_return(\"::!::User One:userone@example.com::!::\\n\\npath/one/file.java\\npath/two/file.java\")\n      git_file_changes = subject\n      expect(git_file_changes.size).to eq(2)\n      expect(git_file_changes).to include({:file => \"path/one/file.java\", :emails => []})\n      expect(git_file_changes).to include({:file => \"path/two/file.java\", :emails => []})\n    end\n\n    context \"includes merge commit modifying files that aren't changed in parent commits\" do\n      # [hack] the git-sha's are not stable on test runs, so we need to use git tags to mark commits\n      let(:previous_build) { instance_double('Build', :ref => \"a\") }\n      before do\n        build.update(:ref => \"d\")\n        allow(build).to receive(:previous_build).and_return previous_build\n        allow(GitRepo).to receive(:inside_repo) do |build, sync_repo, &block|\n          Dir.mktmpdir do |directory|\n            Dir.chdir(directory) do\n              suppressed_git_init\n              `git config user.email \"test@example.com\" && git config user.name \"test\"`\n              FileUtils.touch(\"TESTFILE\")\n              `git add -A && git commit -m \"commit 1\" && git tag a`\n              `git checkout -q -b branch`\n              FileUtils.touch(\"TESTFILE2\")\n              `git add -A && git commit -m \"commit 2\" && git tag b`\n              `git checkout -q -`\n              FileUtils.touch(\"TESTFILE3\")\n              `git add -A && git commit -m \"commit 3\" && git tag c`\n              # --no-commit allows us to change arbitrary files, like in merge conflict resolution\n              `git merge branch --no-ff --no-commit 2> /dev/null`\n              # modify a new file during the merge not modified by parents\n              FileUtils.touch(\"NEWFILE\")\n              `git add -A && git commit -m \"merge commit\" && git tag d`\n              block.call\n            end\n          end\n        end\n      end\n\n      it \"should include all files 
modified in merge commit\" do\n        git_file_changes = subject\n        expect(git_file_changes).to_not include({:file => \"TESTFILE\", :emails => []})\n        expect(git_file_changes).to include({:file => \"TESTFILE2\", :emails => []})\n        expect(git_file_changes).to include({:file => \"TESTFILE3\", :emails => []})\n        expect(git_file_changes).to include({:file => \"NEWFILE\", :emails => []})\n      end\n    end\n  end\n\n  describe \"#files_changed_since_last_green\" do\n    let(:options) { {} }\n    subject { GitBlame.files_changed_since_last_green(build, options) }\n\n    before do\n      allow(GitBlame).to receive(:files_changed_since_last_green).and_call_original\n    end\n\n    it \"should parse the git log and return change file paths\" do\n      allow(GitRepo).to receive(:inside_repo).and_return(\"::!::User One:userone@example.com::!::\\n\\npath/one/file.java\\npath/two/file.java\")\n      git_file_changes = subject\n      expect(git_file_changes.size).to eq(2)\n      expect(git_file_changes).to include({:file => \"path/one/file.java\", :emails => []})\n      expect(git_file_changes).to include({:file => \"path/two/file.java\", :emails => []})\n    end\n\n    context \"fetch emails with files changes\" do\n      let(:options) { {:fetch_emails => true} }\n\n      it \"should parse the git log and return change file paths with emails\" do\n        allow(GitRepo).to receive(:inside_repo).and_return(\"::!::User One:userone@example.com::!::\\n\\npath/one/file.java\\npath/two/file.java\\n::!::User Two:usertwo@example.com::!::\\n\\npath/three/file.java\")\n        git_file_changes = subject\n        expect(git_file_changes.size).to eq(3)\n        expect(git_file_changes).to include({:file => \"path/one/file.java\", :emails => [\"userone@example.com\"]})\n        expect(git_file_changes).to include({:file => \"path/two/file.java\", :emails => [\"userone@example.com\"]})\n        expect(git_file_changes).to include({:file => \"path/three/file.java\", 
:emails => [\"usertwo@example.com\"]})\n      end\n\n      it \"should return nothing if the line doesn't have an email\" do\n        allow(GitRepo).to receive(:inside_repo).and_return(\"::!::::!::\\n\")\n        expect(subject).to be_empty\n      end\n    end\n  end\n\n  describe \"#files_changed_in_branch\" do\n    let(:options) { {} }\n    subject { GitBlame.files_changed_in_branch(build, options) }\n\n    before do\n      allow(GitBlame).to receive(:files_changed_in_branch).and_call_original\n    end\n\n    it \"should parse the git log and return change file paths\" do\n      allow(GitRepo).to receive(:inside_repo).and_return(\"::!::User One:userone@example.com::!::\\n\\npath/one/file.java\\npath/two/file.java\")\n      git_file_changes = subject\n      expect(git_file_changes.size).to eq(2)\n      expect(git_file_changes).to include({:file => \"path/one/file.java\", :emails => []})\n      expect(git_file_changes).to include({:file => \"path/two/file.java\", :emails => []})\n    end\n  end\n\n  describe \"#net_files_changed_in_branch\" do\n    let(:options) { {} }\n\n    let(:git_merge_base_command) { \"git merge-base master #{build.branch_record.name}\" }\n    let(:git_merge_base_output) { '12345' }\n\n    let(:git_diff_command) { \"git diff --name-status --find-renames --find-copies '12345..#{build.branch_record.name}'\" }\n    let(:git_diff_output) {\n      <<-GITDIFF\nD       path/to/deleted_file.java\nM       path/to/modified_file.java\nA       path/to/added_file.java\nR097    path/to/original_name.java path/to/new_name.java\n    GITDIFF\n    }\n\n    subject { GitBlame.net_files_changed_in_branch(build, options) }\n\n    before do\n      allow(GitBlame).to receive(:net_files_changed_in_branch).and_call_original\n\n      allow(GitRepo).to receive(:inside_repo).and_yield\n\n      diff_double = double('git diff')\n      allow(diff_double).to receive(:run).and_return(git_diff_output)\n      allow(Cocaine::CommandLine).to receive(:new).with(git_diff_command) 
{ diff_double }\n\n      merge_base_double = double('git merge-base')\n      allow(merge_base_double).to receive(:run).and_return(git_merge_base_output)\n      allow(Cocaine::CommandLine).to receive(:new).with(git_merge_base_command) { merge_base_double }\n    end\n\n    it \"should parse the git diff and return change file paths\" do\n      git_file_changes = subject\n      expect(git_file_changes.size).to eq(5)\n      expect(git_file_changes).to include({:file => \"path/to/added_file.java\", :emails => []})\n      expect(git_file_changes).to include({:file => \"path/to/deleted_file.java\", :emails => []})\n      expect(git_file_changes).to include({:file => \"path/to/modified_file.java\", :emails => []})\n      expect(git_file_changes).to include({:file => \"path/to/original_name.java\", :emails => []})\n      expect(git_file_changes).to include({:file => \"path/to/new_name.java\", :emails => []})\n    end\n  end\nend\n"
  },
  {
    "path": "spec/lib/git_merge_executor_spec.rb",
    "content": "require 'spec_helper'\nrequire 'git_merge_executor'\n\ndescribe GitMergeExecutor do\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      github.com:\n        type: github\n      stash.example.com:\n        type: stash\n    YAML\n    stub_const \"Settings\", settings\n  end\n\n  describe \"#merge_and_push\" do\n    let(:build) { FactoryBot.create(:build) }\n    let(:merger) { described_class.new(build) }\n\n    subject { merger.merge_and_push }\n\n    context \"when merge succeeds\" do\n      before(:each) do\n        @stubber = CommandStubber.new\n      end\n\n      it \"should not raise exceptions\" do\n        merge_info = subject\n        expect(merge_info).to have_key(:merge_commit)\n        expect(merge_info).to have_key(:log_output)\n        @stubber.check_cmd_executed(\"git merge\")\n      end\n    end\n\n    context \"when merge fails due to merge conflicts\" do\n      before(:each) do\n        @stubber = CommandStubber.new\n        @stubber.stub_capture2e_failure(\"git merge\")\n      end\n\n      it \"should raise an exception\" do\n        expect { subject }.to raise_error(described_class::GitMergeFailedError)\n      end\n    end\n\n    context \"when push fails it resets the git repo and tries again\" do\n      before(:each) do\n        status_success = double('Process::Status', :success? => true)\n        allow(merger).to receive(:sleep)\n        allow(Open3).to receive(:capture2e).and_return([\"\", status_success])\n      end\n\n      it \"should raise an exception\" do\n        status_failure = double('Process::Status', :success? => false)\n        expect(Open3).to receive(:capture2e).with(/git push/)\n          .and_return([\"\", status_failure])\n          .exactly(3).times\n        expect { subject }.to raise_error(described_class::GitPushFailedError)\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/lib/git_repo_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe GitRepo do\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      git.example.com:\n        type: github\n    YAML\n    stub_const \"Settings\", settings\n  end\n\n  describe \"#synchronize_with_remote\" do\n    it \"should throw an exception after the third fetch attempt\" do\n      fetch_double = double('git fetch')\n      expect(fetch_double).to receive(:run).exactly(3).times.and_raise(Cocaine::ExitStatusError)\n      allow(Cocaine::CommandLine).to receive(:new).with('git fetch', anything) { fetch_double }\n      expect(GitRepo).to receive(:sleep).exactly(2).times\n      expect { GitRepo.send(:synchronize_with_remote, \"master\") }.to raise_error(Cocaine::ExitStatusError)\n    end\n  end\n\n  describe '#inside_repo' do\n    before do\n      FileUtils.rm_rf GitRepo::WORKING_DIR\n      FileUtils.mkdir GitRepo::WORKING_DIR\n    end\n\n    it 'updates the remote URL of the cached copy if the remote URL has changed' do\n      # Stipulates that the namespace and the name of the repo are still the\n      # same. This is for situations where something else about the url\n      # changed, e.g. 
switched to a git mirror\n\n      Dir.mktmpdir do |old_remote|\n        Dir.mktmpdir do |new_remote|\n\n          Dir.chdir(old_remote) do\n            suppressed_git_init\n            FileUtils.touch(\"TESTFILE\")\n            `git add -A`\n            `git commit -m \"Initial commit\"`\n          end\n\n          repository = double('Repository',\n                              namespace:        'sq',\n                              name:             'fun-with-flags',\n                              url:              'push-url',\n                              url_for_fetching: old_remote)\n          # Clone the repo first time, prime the cache.\n          GitRepo.inside_repo(repository, sync: false) {}\n\n          # make a copy of the faux remote\n          FileUtils.cp_r(\"#{old_remote}/.\", new_remote, verbose: false)\n\n          allow(repository).to receive(:url_for_fetching).and_return(new_remote)\n\n          # retrieve the updated url of the repository\n          updated_remote = nil\n          GitRepo.inside_repo(repository, sync: false) do\n            updated_remote = `git config --get remote.origin.url`.chomp\n          end\n\n          expect(updated_remote).to eq(new_remote)\n        end\n      end\n    end\n  end\n\n  describe \"#branch_exist?\" do\n    before do\n      allow(GitRepo).to receive(:inside_repo) do |_build, _sync_repo, &block|\n        Dir.mktmpdir do |directory|\n          Dir.chdir(directory) do\n            suppressed_git_init\n            `git config user.email \"test@example.com\" && git config user.name \"test\"`\n            FileUtils.touch(\"TESTFILE\")\n            `git add -A && git commit -m \"commit 1\" && git tag a`\n            block.call\n          end\n        end\n      end\n    end\n\n    it \"should return false for nonexisting branch\" do\n      GitRepo.inside_repo('repo') do\n        expect(GitRepo.branch_exist?('fake_branch')).to be false\n      end\n    end\n\n    it \"should return true for existing branch\" do\n  
    GitRepo.inside_repo('repo') do\n        expect(GitRepo.branch_exist?('master')).to be true\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/lib/github_commit_status_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe GithubCommitStatus do\n  subject { GithubCommitStatus.new(build, oauth_token) }\n  let(:oauth_token) { \"my_test_token\" }\n  let(:build) { FactoryBot.create(:build) }\n\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      git.example.com:\n        type: github\n      github.com:\n        type: github\n    YAML\n    stub_const \"Settings\", settings\n  end\n\n  it \"marks a build as pending\" do\n    build.update_attributes!(:state => 'running')\n    expect(GithubRequest).to receive(:post)\n      .with(%r|/statuses/#{build.ref}|,\n            hash_including(:state => 'pending'),\n            oauth_token\n           ).and_return(commit_status_response)\n    subject.update_commit_status!\n  end\n\n  it \"marks a build as success\" do\n    build.update_attributes!(:state => 'succeeded')\n    expect(GithubRequest).to receive(:post)\n      .with(%r|/statuses/#{build.ref}|,\n            hash_including(:state => 'success'),\n            oauth_token\n           ).and_return(commit_status_response)\n    subject.update_commit_status!\n  end\n\n  it \"marks a build as failure\" do\n    build.update_attributes!(:state => 'failed')\n    expect(GithubRequest).to receive(:post)\n      .with(%r|/statuses/#{build.ref}|,\n            hash_including(:state => 'failure'),\n            oauth_token\n           ).and_return(commit_status_response)\n    subject.update_commit_status!\n  end\n\n  it \"uses a repos github url\" do\n    build.branch_record.update_attributes!(:repository => FactoryBot.create(:repository, :url => \"git@github.com:square/kochiku-worker.git\"))\n    build.update_attributes!(:state => 'failed')\n    build.reload\n    expect(GithubRequest).to receive(:post)\n      .with(\"https://api.github.com/repos/square/kochiku-worker/statuses/#{build.ref}\",\n            anything, anything).and_return(commit_status_response)\n    subject.update_commit_status!\n  end\n\n  def 
commit_status_response\n    '{\"description\":\"Build is running\",\"creator\":{\"gravatar_id\":\"56fdde43fb3bd6cf62bbec24dc8cb682\",\"login\":\"nolan\",\"url\":\"https://git.example.com/api/v3/users/nolan\",\"avatar_url\":\"https://secure.gravatar.com/avatar/56fdde43fb3bd6cf62bbec24dc8cb682?d=https://git.example.com%2Fimages%2Fgravatars%2Fgravatar-user-420.png\",\"id\":41},\"updated_at\":\"2012-10-06T02:59:18Z\",\"created_at\":\"2012-10-06T02:59:18Z\",\"state\":\"success\",\"url\":\"https://git.example.com/api/v3/repos/square/web/statuses/22\",\"target_url\":\"https://kochiku.example.com/square/web/builds/5510\",\"id\":22}'\n  end\nend\n"
  },
  {
    "path": "spec/lib/github_post_receive_hook_spec.rb",
    "content": "require 'spec_helper'\nrequire 'github_post_receive_hook'\n\ndescribe GithubPostReceiveHook do\n  subject { GithubPostReceiveHook.new(repository, 'github_oauth_token_test') }\n  let(:repository) { FactoryBot.create(:repository, :url => \"git@git.example.com:square/web.git\") }\n\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      git.example.com:\n        type: github\n    YAML\n    stub_const \"Settings\", settings\n  end\n\n  it \"does not recreate the hook if it already exists\" do\n    stub_request(:get, \"https://git.example.com/api/v3/repos/square/web/hooks\")\n      .to_return(:body => github_hooks)\n    subject.subscribe!\n  end\n\n  it \"creates the hook\" do\n    stub_request(:get, \"https://git.example.com/api/v3/repos/square/web/hooks\")\n      .to_return(:body => '[]')\n    stub_request(:post, \"https://git.example.com/api/v3/repos/square/web/hooks\").with do |request|\n      body = JSON.parse(request.body)\n      expect(body[\"name\"]).to eq(\"web\")\n      expect(body[\"events\"]).to eq(['pull_request'])\n      expect(body[\"active\"]).to eq(true)\n      expect(body[\"config\"][\"url\"]).to eq(\"http://localhost:3001/pull-request-builder\")\n      true\n    end.to_return(:body => github_hooks)\n    subject.subscribe!\n  end\n\n  it \"updates a repositories github_post_receive_hook_id\" do\n    expect(repository.github_post_receive_hook_id).to eq(nil)\n    stub_request(:get, \"https://git.example.com/api/v3/repos/square/web/hooks\")\n      .to_return(:body => github_hooks)\n    subject.subscribe!\n    expect(repository.github_post_receive_hook_id).to eq(78)\n  end\n\n  it \"updates an existing hook\" do\n    repository.update_attributes!(:github_post_receive_hook_id => 78)\n    called = false\n    stub_request(:patch, \"https://git.example.com/api/v3/repos/square/web/hooks/78\").with do |request|\n      body = JSON.parse(request.body)\n      expect(body[\"name\"]).to eq(\"web\")\n      
expect(body[\"events\"]).to eq(['pull_request'])\n      expect(body[\"active\"]).to eq(true)\n      expect(body[\"config\"][\"url\"]).to eq(\"http://localhost:3001/pull-request-builder\")\n      called = true\n      true\n    end.to_return(:body => github_hooks)\n    subject.subscribe!\n    expect(called).to be true\n  end\n\n  def github_hooks\n    \"[{\\\"active\\\":true,\\\"updated_at\\\":\\\"2012-10-09T19:02:47Z\\\",\\\"last_response\\\":{\\\"status\\\":\\\"unused\\\",\\\"message\\\":null,\\\"code\\\":null},\\\"events\\\":[\\\"pull_request\\\"],\\\"created_at\\\":\\\"2012-10-09T19:02:47Z\\\",\\\"url\\\":\\\"https://git.example.com/api/v3/repos/square/kochiku/hooks/78\\\",\\\"name\\\":\\\"web\\\",\\\"config\\\":{\\\"url\\\":\\\"http://localhost:3001/pull-request-builder\\\"},\\\"id\\\":78}]\"\n  end\nend\n"
  },
  {
    "path": "spec/lib/github_request_spec.rb",
    "content": "require 'spec_helper'\nrequire 'github_request'\n\ndescribe GithubRequest do\n  let(:url) { \"https://git.example.com/api/something-or-other\" }\n  let(:oauth_token) { \"my_test_token\" }\n\n  RSpec.shared_examples \"a github api request\" do\n    it \"should properly include oauth token in header\" do\n      stub_request(http_verb, url).with do |request|\n        expect(request.headers[\"Authorization\"]).to eq(\"token #{oauth_token}\")\n        true\n      end\n\n      subject\n    end\n\n    it \"should specify Github API v3\" do\n      stub_request(http_verb, url).with do |request|\n        expect(request.headers[\"Accept\"]).to eq(\"application/vnd.github.v3+json\")\n        true\n      end\n\n      subject\n    end\n  end\n\n  RSpec.shared_examples \"a modifying github api request\" do\n    it \"should JSON encode request data\" do\n\n      stub_request(http_verb, url).with do |request|\n        body = JSON.parse(request.body)\n        expect(body).to eq(request_data)\n        true\n      end\n\n      subject\n    end\n  end\n\n  describe \".get\" do\n    let(:http_verb) { :get }\n    subject {\n      GithubRequest.get(url, oauth_token)\n    }\n\n    include_examples \"a github api request\"\n  end\n\n  describe \".post\" do\n    let(:http_verb) { :post }\n    let(:request_data) { {\"arg1\" => {\"arg2\" => \"value1\"}} }\n    subject {\n      GithubRequest.post(url, request_data, oauth_token)\n    }\n\n    include_examples \"a github api request\"\n    include_examples \"a modifying github api request\"\n  end\n\n  describe \".patch\" do\n    let(:http_verb) { :patch }\n    let(:request_data) { {\"arg1\" => {\"arg2\" => \"value1\"}} }\n    subject {\n      GithubRequest.patch(url, request_data, oauth_token)\n    }\n\n    include_examples \"a github api request\"\n    include_examples \"a modifying github api request\"\n  end\nend\n"
  },
  {
    "path": "spec/lib/partitioner/default_spec.rb",
    "content": "require 'lib/partitioner/shared_default_behavior'\n\ndescribe Partitioner::Default do\n  include_examples \"Partitioner::Default behavior\", Partitioner::Default\nend\n"
  },
  {
    "path": "spec/lib/partitioner/dependency_map_spec.rb",
    "content": "require 'lib/partitioner/shared_default_behavior'\n\ndescribe Partitioner::DependencyMap do\n  include_examples \"Partitioner::Default behavior\", Partitioner::DependencyMap\n\n  describe '#partitions' do\n    let(:build) { FactoryBot.create(:build) }\n    let(:partitioner) { Partitioner::DependencyMap.new(build, build.kochiku_yml) }\n\n    before do\n      allow(GitRepo).to receive(:load_kochiku_yml).and_return(kochiku_yml)\n      allow(GitRepo).to receive(:inside_copy).and_yield\n\n      allow(Dir).to receive(:[]) do |*globs|\n        matched_files = []\n\n        matched_files << 'source_glob/1/foo.rb' if globs.include?('source_glob/1/**')\n        matched_files << 'test_glob/1/bar_spec.rb' if globs.include?('test_glob/1/**')\n\n        matched_files << 'source_glob/2_part_1/foo.rb' if globs.include?('source_glob/2_part_1/**')\n        matched_files << 'source_glob/2_part_2/foo.rb' if globs.include?('source_glob/2_part_2/**')\n        matched_files << 'test_glob/2_part_1/bar_spec.rb' if globs.include?('test_glob/2_part_1/**')\n        matched_files << 'test_glob/2_part_2/bar_spec.rb' if globs.include?('test_glob/2_part_2/**')\n\n        matched_files << 'test_glob/default/foo/bar/baz_spec.rb' if globs.include?('test_glob/default/**')\n\n        matched_files << 'glob/foo/bar.rb' if globs.include?('glob/**')\n\n        matched_files\n      end\n\n      allow(GitBlame).to receive(:net_files_changed_in_branch) do\n        changed_files.map { |file| {:file => file, :emails => []} }\n      end\n    end\n\n    let(:changed_files) { [] }\n\n    let(:kochiku_yml) do\n      {\n        'dependency_map_options' => {\n          'run_all_tests_for_branches' => [\n            'master'\n          ]\n        },\n        'targets' => [target]\n      }\n    end\n\n    let(:dependency_map) do\n      [\n        {\n          'source_glob' => 'source_glob/1/**',\n          'test_glob' => 'test_glob/1/**'\n        },\n        {\n          'source_glob' => %w(\n         
   source_glob/2_part_1/**\n            source_glob/2_part_2/**\n          ),\n          'test_glob' => %w(\n            test_glob/2_part_1/**\n            test_glob/2_part_2/**\n          )\n        }\n      ]\n    end\n\n    subject(:partitions) { partitioner.partitions }\n\n    context 'when dependency_map is defined' do\n      let(:target) do\n        {\n          'type' => 'karma_chrome',\n          'dependency_map' => dependency_map,\n          'default_test_glob' => 'test_glob/default/**'\n        }\n      end\n\n      context 'on a branch where all tests should be run' do\n        let(:build) { FactoryBot.create(:build, :branch_record => FactoryBot.create(:master_branch)) }\n\n        context 'when one of the source_globs matches changed files' do\n          let(:changed_files) { %w(source_glob/1/foo.rb) }\n\n          it 'should add all test globs to partition' do\n            expect(partitions.first['files']).to(\n              eq(%w(test_glob/1/bar_spec.rb test_glob/2_part_1/bar_spec.rb test_glob/2_part_2/bar_spec.rb test_glob/default/foo/bar/baz_spec.rb))\n            )\n          end\n        end\n\n        context 'when none of the source_globs match the changed files' do\n          let(:changed_files) { %w(does_not_match_any_source) }\n\n          it 'should add all test globs to partition' do\n            expect(partitions.first['files']).to(\n              eq(%w(test_glob/1/bar_spec.rb test_glob/2_part_1/bar_spec.rb test_glob/2_part_2/bar_spec.rb test_glob/default/foo/bar/baz_spec.rb))\n            )\n          end\n        end\n      end\n\n      context 'on a branch where tests should be isolated' do\n        context 'when one of the source_globs matches changed files' do\n          let(:changed_files) { %w(source_glob/1/foo.rb) }\n\n          it 'adds only the tests for that glob to the partition' do\n            expect(partitions.first['files']).to eq(%w(test_glob/1/bar_spec.rb))\n          end\n\n          context 'but no test_glob is 
provided' do\n            let(:dependency_map) do\n              [\n                {\n                  'source_glob' => 'source_glob/1/**'\n                }\n              ]\n            end\n\n            it 'does not add any partitions' do\n              expect(partitions).to eq([])\n            end\n          end\n\n        end\n\n        context 'when multiple source_globs match changed files' do\n          let(:changed_files) { %w(source_glob/1/foo.rb source_glob/2_part_1/foo.rb) }\n\n          it 'adds the tests for both globs to the partition' do\n            expect(partitions.first['files']).to(\n              eq(%w(test_glob/1/bar_spec.rb test_glob/2_part_1/bar_spec.rb test_glob/2_part_2/bar_spec.rb))\n            )\n          end\n        end\n\n        context 'when none of the source_globs match the changed files' do\n          let(:changed_files) { %w(does_not_match_any_source) }\n\n          context 'when default_test_glob is defined' do\n            it 'adds only default_test_glob tests to the partition' do\n              expect(partitions.first['files']).to eq(%w(test_glob/default/foo/bar/baz_spec.rb))\n            end\n          end\n\n          context 'when default_test_glob is undefined' do\n            let(:target) do\n              {\n                'type' => 'karma_chrome',\n                'dependency_map' => dependency_map\n              }\n            end\n\n            it 'adds no tests to the partition' do\n              expect(partitions).to eq([])\n            end\n          end\n        end\n\n        context 'when workers are defined for the maps' do\n          let(:target) do\n            {\n              'type' => 'karma_chrome',\n              'workers' => 5,\n              'dependency_map' => dependency_map\n            }\n          end\n\n          let(:dependency_map) do\n            [\n              {\n                'source_glob' => 'source_glob/1/**',\n                'test_glob' => 'test_glob/1/**',\n                
'workers' => 1\n              },\n              {\n                'source_glob' => %w(\n                  source_glob/2_part_1/**\n                  source_glob/2_part_2/**\n                ),\n                'test_glob' => %w(\n                  test_glob/2_part_1/**\n                  test_glob/2_part_2/**\n                ),\n                'workers' => 1\n              }\n            ]\n          end\n\n          context 'when one of the source_globs matches changed files' do\n            let(:changed_files) { %w(source_glob/1/foo.rb) }\n\n            it 'creates partitions for the number of workers specified by that mapping' do\n              expect(partitions.size).to eq(1)\n            end\n          end\n\n          context 'when multiple source_globs match changed files' do\n            let(:changed_files) { %w(source_glob/1/foo.rb source_glob/2_part_1/foo.rb) }\n\n            it 'creates partitions for the sum of the workers specified by those mappings' do\n              expect(partitions.size).to eq(2)\n            end\n\n            context 'and the max workers for the target is less than the sum of those workers' do\n              let(:target) do\n                {\n                  'type' => 'karma_chrome',\n                  'workers' => 1,\n                  'dependency_map' => dependency_map\n                }\n              end\n\n              it 'creates partitions for the max workers specified by the target' do\n                expect(partitions.size).to eq(1)\n              end\n            end\n          end\n        end\n      end\n    end\n\n    context 'when dependency_map is undefined' do\n      context 'when glob is defined' do\n        context 'when default_test_glob is undefined' do\n          let(:target) do\n            {\n              'type' => 'karma_chrome',\n              'glob' => 'glob/**'\n            }\n          end\n\n          it 'adds tests from glob to partition' do\n            expect(partitions.first['files']).to 
eq(%w(glob/foo/bar.rb))\n          end\n        end\n\n        context 'when default_test_glob is defined' do\n          let(:target) do\n            {\n              'type' => 'karma_chrome',\n              'glob' => 'glob/**',\n              'default_test_glob' => 'test_glob/default/**'\n            }\n          end\n\n          it 'adds tests from default_test_glob to partition' do\n            expect(partitions.first['files']).to eq(%w(test_glob/default/foo/bar/baz_spec.rb))\n          end\n        end\n      end\n\n      context 'when glob is undefined' do\n        context 'when default_test_glob is undefined' do\n          let(:target) do\n            {\n              'type' => 'karma_chrome'\n            }\n          end\n\n          it 'does not make any partitions' do\n            expect(partitions).to eq([])\n          end\n        end\n\n        context 'when default_test_glob is defined' do\n          let(:target) do\n            {\n              'type' => 'karma_chrome',\n              'default_test_glob' => 'test_glob/default/**'\n            }\n          end\n\n          it 'adds tests from default_test_glob to partition' do\n            expect(partitions.first['files']).to eq(%w(test_glob/default/foo/bar/baz_spec.rb))\n          end\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/lib/partitioner/go_spec.rb",
    "content": "require 'spec_helper'\nrequire 'partitioner/go'\n\ndescribe Partitioner::Go do\n  let(:repository) { FactoryBot.create(:repository) }\n  let(:branch) { FactoryBot.create(:master_branch, repository: repository, name: \"master\") }\n  let!(:build) { FactoryBot.create(:build, branch_record: branch) }\n  let(:kochiku_yml) { nil }\n\n  subject { Partitioner::Go.new(build, kochiku_yml) }\n\n  before do\n    allow(GitRepo).to receive(:inside_copy).and_yield 'some_dir'\n  end\n\n  context \"with actual files\" do\n\n    let(:go_list_output) {\n      <<~OUTPUT\n        {\n          \"ImportPath\": \"/vendor/test\",\n          \"Deps\": [\n            \"runtime\",\n            \"runtime/internal/atomic\",\n            \"runtime/internal/sys\",\n            \"unsafe\"\n          ]\n        }\n        {\n          \"ImportPath\": \"liba\",\n          \"Deps\": [\n            \"runtime\",\n            \"runtime/internal/atomic\",\n            \"runtime/internal/sys\",\n            \"unsafe\"\n          ]\n        }\n        {\n          \"ImportPath\": \"libb\",\n          \"Deps\": [\n            \"runtime\",\n            \"runtime/internal/atomic\",\n            \"runtime/internal/sys\",\n            \"unsafe\"\n          ]\n        }\n        {\n          \"ImportPath\": \"libc/test\",\n          \"Deps\": [\n            \"runtime\",\n            \"runtime/internal/atomic\",\n            \"runtime/internal/sys\",\n            \"unsafe\"\n          ]\n        }\n        {\n          \"ImportPath\": \"libc\",\n          \"Imports\": [\n            \"liba\"\n          ],\n          \"Deps\": [\n            \"liba\",\n            \"runtime\",\n            \"runtime/internal/atomic\",\n            \"runtime/internal/sys\",\n            \"unsafe\"\n          ],\n          \"TestImports\": [\n            \"libb\",\n            \"testing\"\n          ]\n        }\n        {\n          \"ImportPath\": \"libd\",\n          \"Imports\": [\n            \"libc\"\n         
 ],\n          \"Deps\": [\n            \"liba\",\n            \"libc\",\n            \"runtime\",\n            \"runtime/internal/atomic\",\n            \"runtime/internal/sys\",\n            \"unsafe\"\n          ],\n          \"TestImports\": [\n            \"libc\",\n            \"testing\"\n          ]\n        }\n        {\n          \"ImportPath\": \"libe\",\n          \"Imports\": [\n            \"libb\"\n          ],\n          \"Deps\": [\n            \"libb\",\n            \"runtime\",\n            \"runtime/internal/atomic\",\n            \"runtime/internal/sys\",\n            \"unsafe\"\n          ],\n          \"XTestImports\": [\n            \"liba\",\n            \"testing\"\n          ]\n        }\n    OUTPUT\n    }\n\n    before do\n      go_list_double = double('go list')\n      allow(go_list_double).to receive(:run).and_return(go_list_output)\n      allow(Cocaine::CommandLine).to receive(:new).and_return(go_list_double)\n    end\n\n    describe \"#package_info_map\" do\n      it \"it should get the package info map\" do\n        pinfo_map = subject.package_info_map\n        expect(pinfo_map[\"liba\"][\"ImportPath\"]).to eq(\"liba\")\n        expect(pinfo_map[\"liba\"][\"Deps\"]).to eq([\"runtime\", \"runtime/internal/atomic\", \"runtime/internal/sys\", \"unsafe\"])\n        expect(pinfo_map[\"libb\"][\"ImportPath\"]).to eq(\"libb\")\n        expect(pinfo_map[\"libc\"][\"Imports\"]).to eq([\"liba\"])\n      end\n    end\n\n    describe \"#module_dependency_map\" do\n      it \"it should get the dependencies\" do\n        dep_map = subject.package_dependency_map\n\n        expect(dep_map[\"liba\"]).to eq(%w[liba libc libe_test].to_set)\n        expect(dep_map[\"libb\"]).to eq(%w[libb libc libe].to_set)\n        expect(dep_map[\"libc\"]).to eq(%w[libc libd].to_set)\n        expect(dep_map[\"libd\"]).to eq(%w[libd].to_set)\n        expect(dep_map[\"libe\"]).to eq(%w[libe].to_set)\n      end\n    end\n\n    describe \"#depends_on_map\" do\n      it 
\"it should get the dependencies\" do\n        dep_map = subject.depends_on_map\n\n        expect(dep_map[\"liba\"]).to eq(%w[liba libc libd libe_test].to_set)\n        expect(dep_map[\"libb\"]).to eq(%w[libb libc libe].to_set)\n        expect(dep_map[\"libc\"]).to eq(%w[libc libd].to_set)\n        expect(dep_map[\"libd\"]).to eq(%w[libd].to_set)\n        expect(dep_map[\"libe\"]).to eq(%w[libe].to_set)\n      end\n    end\n\n    describe \"#all_packages\"  do\n      it 'should filter /vendor' do\n        expect(subject.all_packages.include?(\"/vendor/test\")).to eq(false)\n        expect(subject.all_packages.include?(\"liba\")).to eq(true)\n      end\n    end\n\n    describe \"#add_partitions\" do\n      it 'should create partitions for all target_types' do\n        partitions = subject.add_partitions(subject.all_packages)\n        expect(partitions.size).to eq(subject.all_packages_target_types.size + subject.top_level_packages_target_types.size)\n      end\n\n    end\n\n    describe \"#package_folders_map\" do\n      it 'should return the packages as folders' do\n        folder_map = subject.package_folders_map(subject.all_packages)\n        expect(folder_map[\"liba\"]).to eq(%w[./liba/])\n        expect(folder_map[\"libc\"]).to eq(%w[./libc/test/ ./libc/])\n      end\n    end\n\n    describe \"#failed_convergence_tests\" do\n      it 'should return an empty array if there is no previous build' do\n        expect(subject.failed_convergence_tests).to eq(%w[])\n      end\n\n      it 'should return the failed paths on a previous build' do\n        failed_build = FactoryBot.create(:completed_build, branch_record: branch, num_build_parts: 1, state: 'failed')\n        allow_any_instance_of(Build).to receive(:previous_build).and_return(failed_build)\n        expect(subject.failed_convergence_tests).to eq(%w[/foo/1.test foo/baz/a.test foo/baz/b.test])\n      end\n    end\n\n    describe \"#file_to_packages\" do\n      it 'should return paths based on a files package 
dependencies for a .go file' do\n        expect(subject.file_to_packages(\"libb/test.go\")).to eq(%w[libb libc libe])\n      end\n\n      it 'should return the path of the toplevel package for a non .go file' do\n        expect(subject.file_to_packages(\"libb/readme.md\")).to eq(%w[libb])\n      end\n    end\n\n    describe \"#add_with_split\" do\n      it 'should handle an empty package_list' do\n        expect(subject.add_with_split([], \"test\", 2)).to be_nil\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/lib/partitioner/maven_spec.rb",
    "content": "require 'spec_helper'\nrequire 'partitioner/maven'\n\ndescribe Partitioner::Maven do\n  let(:repository) { FactoryBot.create(:repository) }\n  let(:branch) { FactoryBot.create(:master_branch, repository: repository, name: \"master\") }\n  let(:build) { FactoryBot.create(:build, branch_record: branch) }\n  let(:kochiku_yml) { nil }\n\n  subject { Partitioner::Maven.new(build, kochiku_yml) }\n\n  before do\n    allow(GitRepo).to receive(:inside_copy).and_yield\n  end\n\n  describe \"#group_modules\" do\n    it \"should group modules based on the top level directory\" do\n      modules = [\"a\", \"b\", \"b/1\", \"b/2\", \"b/1/2\", \"c/1\"]\n      partitions = subject.group_modules(modules)\n      expect(partitions.size).to eq(3)\n      expect(partitions).to include(a_hash_including({ 'files' => ['a'] }))\n      expect(partitions).to include(a_hash_including({ 'files' => ['b', 'b/1', 'b/1/2', 'b/2'] }))\n      expect(partitions).to include(a_hash_including({ 'files' => ['c/1'] }))\n    end\n\n    context \"with expand_directories\" do\n      let(:kochiku_yml) {\n        {\n          'maven_settings' => {\n            'expand_directories' => ['b'],\n          }\n        }\n      }\n      it \"should break down modules when included in expand_directories\" do\n        modules = [\"a\", \"b\", \"b/elephant\", \"b/elephant/elephant-protos\", \"b/mint\",]\n        partitions = subject.group_modules(modules)\n        expect(partitions.size).to eq(4)\n        expect(partitions).to include(a_hash_including({ 'files' => ['a'] }))\n        expect(partitions).to include(a_hash_including({ 'files' => ['b'] }))\n        expect(partitions).to include(a_hash_including({ 'files' => ['b/elephant', 'b/elephant/elephant-protos'] }))\n        expect(partitions).to include(a_hash_including({ 'files' => ['b/mint'] }))\n      end\n    end\n  end\n\n  describe \"#partitions\" do\n    context \"on a convergence branch\" do\n      before do\n        
expect(build.branch_record).to be_convergence\n        allow(subject).to receive(:sort_modules) { |mvn_modules| mvn_modules }\n      end\n\n      context \"for a given set of file changes\" do\n        before do\n          allow(GitBlame).to receive(:files_changed_since_last_build).with(build, sync: anything)\n            .and_return([{:file => \"module-one/src/main/java/com/lobsters/foo.java\", :emails => []},\n                         {:file => \"module-two/src/main/java/com/lobsters/bar.java\", :emails => []}])\n          allow(File).to receive(:exist?).and_return(false)\n          allow(File).to receive(:exist?).with(\"module-one/pom.xml\").and_return(true)\n          allow(File).to receive(:exist?).with(\"module-two/pom.xml\").and_return(true)\n\n          allow(subject).to receive(:maven_modules).and_return([\"module-one\", \"module-two\", \"module-two/integration\", \"module-three\", \"module-four\"])\n          allow(subject).to receive(:depends_on_map).and_return(\n            {\n              \"module-one\" => [\"module-one\", \"module-three\", \"module-four\"].to_set,\n              \"module-two\" => [\"module-two\", \"module-two/integration\", \"module-three\"].to_set,\n            }\n          )\n          expect(subject).to_not receive(:all_partitions)\n        end\n\n        it \"should return the set of modules to build\" do\n          partitions = subject.partitions\n\n          expect(partitions.first['type']).to eq('maven') # This should be true for all partitioner actions\n          expect(partitions.first['options']).to_not include('log_file_globs') # Unless log_file_globs is set\n\n          expect(partitions.size).to eq(4)\n          expect(partitions).to include(a_hash_including({ 'files' => ['module-one'] }))\n          expect(partitions).to include(a_hash_including({ 'files' => ['module-two', 'module-two/integration'] }))\n          expect(partitions).to include(a_hash_including({ 'files' => ['module-three'] }))\n          
expect(partitions).to include(a_hash_including({ 'files' => ['module-four'] }))\n        end\n\n        context \"multiple workers are specified for a module\" do\n          let(:kochiku_yml) {\n            {\n              'maven_settings' => {\n                'multiple_workers' => {'module-one' => 3}\n              }\n            }\n          }\n\n          it \"should return set of modules to build, with separate entries for each test chunk\" do\n            partitions = subject.partitions\n\n            expect(partitions.size).to eq(6)\n            expect(partitions).to include(a_hash_including({ 'files' => ['module-one'], 'options' => {'total_workers' => 3, 'worker_chunk' => 1}}))\n            expect(partitions).to include(a_hash_including({ 'files' => ['module-one'], 'options' => {'total_workers' => 3, 'worker_chunk' => 2}}))\n            expect(partitions).to include(a_hash_including({ 'files' => ['module-one'], 'options' => {'total_workers' => 3, 'worker_chunk' => 3}}))\n            expect(partitions).to include(a_hash_including({ 'files' => ['module-three'], 'options' => {}}))\n          end\n        end\n\n        context \"with always_build set\" do\n          let(:kochiku_yml) {\n            {\n              'maven_settings' => {\n                'always_build' => ['module-b'],\n              }\n            }\n          }\n\n          it \"should always include the always_build in the partitions\" do\n            partitions = subject.partitions\n            expect(partitions.size).to eq(5)\n            expect(partitions).to include(a_hash_including({ 'files' => ['module-b'] }))\n          end\n        end\n\n        context 'with log_file_globs' do\n          let(:kochiku_yml) { { 'log_file_globs' => log_files } }\n\n          context 'that uses a single string' do\n            let(:log_files) { 'mylog.log' }\n\n            it 'puts an array into the options' do\n              partitions = subject.partitions\n              
expect(partitions.first['options']['log_file_globs']).to eq(['mylog.log'])\n            end\n          end\n\n          context 'that uses an array' do\n            let(:log_files) { ['mylog.log', 'another.log'] }\n\n            it 'puts the array into the options' do\n              partitions = subject.partitions\n              expect(partitions.first['options']['log_file_globs']).to eq(['mylog.log', 'another.log'])\n            end\n          end\n        end\n\n        context \"with ignore_paths set\" do\n          let(:kochiku_yml) {\n            {\n              'maven_settings' => {\n                'ignore_paths' => ['module-two'],\n              }\n            }\n          }\n\n          it \"should not return partitions for an ignored directory\" do\n            partitions = subject.partitions\n            expect(partitions.size).to eq(3)\n            expect(partitions).to include(a_hash_including({ 'files' => ['module-one'] }))\n            expect(partitions).to include(a_hash_including({ 'files' => ['module-three'] }))\n            expect(partitions).to include(a_hash_including({ 'files' => ['module-four'] }))\n          end\n        end\n      end\n\n      context \"with a previous build on the same branch\" do\n        let(:build2) { FactoryBot.create(:build, branch_record: branch) }\n        subject { Partitioner::Maven.new(build2, kochiku_yml) }\n\n        it \"should add all the non-successful parts from the previous build\" do\n          build_part = FactoryBot.create(:build_part, :build_instance => build, :paths => [\"module-one\"])\n          expect(build.build_parts.first).to be_unsuccessful\n\n          partitions = subject.partitions\n          expect(partitions.size).to eq(1)\n          expect(partitions).to include(\n            a_hash_including(\"files\" => build_part.paths, \"queue\" => build_part.queue.to_s)\n          )\n        end\n\n        it \"should not add all successful parts from the previous build\" do\n          build_part = 
FactoryBot.create(:build_part, :build_instance => build, :paths => [\"module-one\"])\n          FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed')\n          expect(build.build_parts.first).to be_successful\n\n          partitions = subject.partitions\n          expect(partitions.size).to eq(0)\n        end\n      end\n\n      context \"with build_everything set\" do\n        let(:kochiku_yml) {\n          {\n            'maven_settings' => {\n              'build_everything' => ['build-all'],\n            }\n          }\n        }\n\n        it \"should build everything if one of the changed file starts with a path in build_everything\" do\n          allow(GitBlame).to receive(:files_changed_since_last_build).with(build, sync: anything)\n            .and_return([{:file => \"build-all/src/main/java/com/lobsters/foo.java\", :emails => []}])\n          allow(File).to receive(:exist?).and_return(false)\n          allow(subject).to receive(:pom_for).and_return \"\"\n\n          allow(subject).to receive(:maven_modules).and_return([\"module-one\", \"build-all\"])\n          allow(subject).to receive(:depends_on_map).and_return(\n            {\n              \"module-one\" => [\"module-one\", \"build-all\"].to_set,\n              \"build-all\" => [\"build-all\"].to_set,\n            }\n          )\n\n          expect(subject).to receive(:all_partitions).and_return([{\"type\" => \"maven\", \"files\" => \"ALL\"}])\n\n          partitions = subject.partitions\n          expect(partitions.size).to eq(1)\n          expect(partitions.first).to match('type' => 'maven', 'files' => 'ALL', 'options' => {})\n        end\n      end\n\n      it \"should build everything if one of the files does not map to a module\" do\n        allow(GitBlame).to receive(:files_changed_since_last_build).with(build, sync: anything)\n          .and_return([{:file => \"toplevel/foo.xml\", :emails => []}])\n\n        allow(subject).to receive(:depends_on_map).and_return(\n   
       {\n            \"module-one\" => [\"module-one\", \"module-three\", \"module-four\"].to_set,\n            \"module-two\" => [\"module-two\", \"module-three\"].to_set\n          }\n        )\n\n        expect(subject).to receive(:all_partitions).and_return([{\"type\" => \"maven\", \"files\" => \"ALL\"}])\n\n        partitions = subject.partitions\n        expect(partitions.size).to eq(1)\n        expect(partitions.first).to match('type' => 'maven', 'files' => 'ALL', 'options' => {})\n      end\n\n      it \"should not fail if a file is referenced in a top level module that is not in the top level pom\" do\n        allow(GitBlame).to receive(:files_changed_since_last_build).with(build, sync: anything)\n          .and_return([{:file => \"new-module/src/main/java/com/lobsters/foo.java\", :emails => []}])\n\n        allow(File).to receive(:exist?).and_return(false)\n        allow(File).to receive(:exist?).with(\"new-module/pom.xml\").and_return(true)\n\n        allow(subject).to receive(:maven_modules).and_return([\"module-one\", \"module-two\"])\n        allow(subject).to receive(:depends_on_map).and_return(\n          {\n            \"module-one\" => [\"module-one\", \"module-three\", \"module-four\"].to_set,\n            \"module-two\" => [\"module-two\", \"module-three\"].to_set\n          }\n        )\n        expect(subject).to_not receive(:all_partitions)\n\n        partitions = subject.partitions\n        expect(partitions.size).to eq(0)\n      end\n\n      context \"with options\" do\n        let(:kochiku_yml) { { 'log_file_globs' => 'mylog.log', 'retry_count' => 5 } }\n\n        it \"should include options in the event of a partial build\" do\n          allow(GitBlame).to receive(:files_changed_since_last_build).with(build, sync: anything)\n            .and_return([{:file => \"toplevel/foo.xml\", :emails => []}])\n\n          expect(subject).to receive(:all_partitions).and_return([{\"type\" => \"maven\", \"files\" => \"ALL\"}])\n\n          partitions = 
subject.partitions\n          expect(partitions.size).to be > 0\n          expect(partitions.first).to match a_hash_including('options' =>\n              {'log_file_globs' => ['mylog.log'], 'retry_count' => 5})\n        end\n      end\n    end\n\n    context \"on a non-convergence branch\" do\n      let(:branch) { FactoryBot.create(:branch, convergence: false) }\n      # let(:build) { FactoryBot.create(:build, :branch => \"branch-of-master\") }\n\n      before do\n        expect(build.branch_record).to_not be_convergence\n      end\n\n      context \"with a previous build\" do\n        let(:build2) { FactoryBot.create(:build, branch_record: FactoryBot.create(:master_branch)) }\n        subject { Partitioner::Maven.new(build2, kochiku_yml) }\n\n        it \"should NOT add all the non-successful parts from the previous build\" do\n          FactoryBot.create(:build_part, :build_instance => build, :paths => [\"module-one\"])\n\n          expect(build.build_parts.first).to be_unsuccessful\n          allow(subject).to receive(:sort_modules) { |mvn_modules| mvn_modules }\n\n          partitions = subject.partitions\n          expect(partitions.size).to eq(0)\n        end\n      end\n    end\n  end\n\n  describe \"#emails_for_commits_causing_failures\" do\n    it \"should return nothing if there are no failed parts\" do\n      expect(build.build_parts.failed_or_errored).to be_empty\n      emails = subject.emails_for_commits_causing_failures\n      expect(emails).to be_empty\n    end\n\n    context \"with a module that failed to build\" do\n      before do\n        build_part = FactoryBot.create(:build_part, :paths => [\"failed-module\"], :build_instance => build)\n        FactoryBot.create(:build_attempt, :state => 'failed', :build_part => build_part)\n        expect(build.build_parts.failed_or_errored).to eq([build_part])\n\n        allow(GitRepo).to receive(:inside_copy).and_yield\n        expect(subject).to_not receive(:all_partitions)\n      end\n\n      it \"should 
return the emails for the modules that are failing\" do\n        allow(GitBlame).to receive(:files_changed_since_last_green).with(build, fetch_emails: true)\n          .and_return([{:file => \"module-one/src/main/java/com/lobsters/Foo.java\", :emails => [\"userone@example.com\"]},\n                       {:file => \"module-two/src/main/java/com/lobsters/Bar.java\", :emails => [\"usertwo@example.com\"]},\n                       {:file => \"failed-module/src/main/java/com/lobsters/Baz.java\", :emails => [\"userfour@example.com\"]},\n                       {:file => \"failed-module/src/main/java/com/lobsters/Bing.java\", :emails => [\"userfour@example.com\"]}])\n        allow(File).to receive(:exist?).and_return(false)\n        allow(File).to receive(:exist?).with(\"module-one/pom.xml\").and_return(true)\n        allow(File).to receive(:exist?).with(\"module-two/pom.xml\").and_return(true)\n        allow(File).to receive(:exist?).with(\"failed-module/pom.xml\").and_return(true)\n\n        allow(subject).to receive(:depends_on_map).and_return(\n          {\n            \"module-one\" => [\"module-one\", \"module-three\", \"failed-module\"].to_set,\n            \"module-two\" => [\"module-two\", \"module-three\"].to_set,\n            \"failed-module\" => [\"failed-module\"].to_set\n          }\n        )\n\n        email_and_files = subject.emails_for_commits_causing_failures\n        expect(email_and_files.size).to eq(2)\n        expect(email_and_files[\"userone@example.com\"]).to eq([\"module-one/src/main/java/com/lobsters/Foo.java\"])\n        expect(email_and_files[\"userfour@example.com\"].size).to eq(2)\n        expect(email_and_files[\"userfour@example.com\"]).to include(\"failed-module/src/main/java/com/lobsters/Baz.java\")\n        expect(email_and_files[\"userfour@example.com\"]).to include(\"failed-module/src/main/java/com/lobsters/Bing.java\")\n      end\n\n      context \"with ignore_paths set\" do\n        let(:kochiku_yml) {\n          {\n            
'maven_settings' => {\n              'ignore_paths' => ['ignored-module'],\n            }\n          }\n        }\n\n        it \"should not return emails if changes are on an ignored path and not in the dependency map\" do\n          allow(GitBlame).to receive(:files_changed_since_last_green).with(build, fetch_emails: true)\n            .and_return([{:file => \"ignored-module/src/main/java/com/lobsters/Foo.java\", :emails => [\"userone@example.com\"]},\n                         {:file => \"failed-module/src/main/java/com/lobsters/Bing.java\", :emails => [\"userfour@example.com\"]}])\n          allow(File).to receive(:exist?).and_return(false)\n          allow(File).to receive(:exist?).with(\"ignored-module/pom.xml\").and_return(true)\n          allow(File).to receive(:exist?).with(\"failed-module/pom.xml\").and_return(true)\n\n          allow(subject).to receive(:depends_on_map).and_return({\n                                                                  \"ignored-module\" => [\"ignored-module\"].to_set,\n                                                                  \"failed-module\" => [\"failed-module\"].to_set\n                                                                })\n\n          email_and_files = subject.emails_for_commits_causing_failures\n          expect(email_and_files.size).to eq(1)\n          expect(email_and_files[\"userfour@example.com\"]).to eq([\"failed-module/src/main/java/com/lobsters/Bing.java\"])\n        end\n\n        it \"should return emails if changes are on an ignored path but are in the dependency map\" do\n          allow(GitBlame).to receive(:files_changed_since_last_green).with(build, fetch_emails: true)\n            .and_return([{:file => \"ignored-module/src/main/java/com/lobsters/Foo.java\", :emails => [\"userone@example.com\"]},\n                         {:file => \"failed-module/src/main/java/com/lobsters/Bing.java\", :emails => [\"userfour@example.com\"]}])\n          allow(File).to 
receive(:exist?).and_return(false)\n          allow(File).to receive(:exist?).with(\"ignored-module/pom.xml\").and_return(true)\n          allow(File).to receive(:exist?).with(\"failed-module/pom.xml\").and_return(true)\n\n          allow(subject).to receive(:depends_on_map).and_return({\n                                                                  \"ignored-module\" => [\"ignored-module\", \"failed-module\"].to_set,\n                                                                  \"failed-module\" => [\"failed-module\"].to_set\n                                                                })\n\n          email_and_files = subject.emails_for_commits_causing_failures\n          expect(email_and_files.size).to eq(2)\n          expect(email_and_files[\"userone@example.com\"]).to eq([\"ignored-module/src/main/java/com/lobsters/Foo.java\"])\n          expect(email_and_files[\"userfour@example.com\"]).to eq([\"failed-module/src/main/java/com/lobsters/Bing.java\"])\n        end\n      end\n\n      context \"with build_everything set\" do\n        let(:kochiku_yml) {\n          {\n            'maven_settings' => {\n              'build_everything' => ['build-all'],\n            }\n          }\n        }\n\n        it \"should return email for change to build_everything even if build_everything module does not depend on changed file\" do\n          allow(GitBlame).to receive(:files_changed_since_last_green).with(build, fetch_emails: true)\n            .and_return([{:file => \"build-all/src/main/java/com/lobsters/Foo.java\", :emails => [\"userone@example.com\"]},\n                         {:file => \"module-four/src/main/java/com/lobsters/Bar.java\", :emails => [\"userfour@example.com\"]}])\n          allow(File).to receive(:exist?).and_return(false)\n          allow(File).to receive(:exist?).with(\"build-all/pom.xml\").and_return(true)\n          allow(File).to receive(:exist?).with(\"failed-module/pom.xml\").and_return(true)\n\n          allow(subject).to 
receive(:depends_on_map).and_return({\n                                                                  \"build-all\" => [\"build-all\"].to_set,\n                                                                  \"module-four\" => [\"module-four\"].to_set\n                                                                })\n\n          email_and_files = subject.emails_for_commits_causing_failures\n          expect(email_and_files.size).to eq(2)\n          expect(email_and_files[\"userone@example.com\"]).to eq([\"build-all/src/main/java/com/lobsters/Foo.java\"])\n          expect(email_and_files[\"userfour@example.com\"]).to eq([\"module-four/src/main/java/com/lobsters/Bar.java\"])\n        end\n      end\n    end\n  end\n\n  describe \"#sort_modules\" do\n    before do\n      allow(subject).to receive(:module_dependency_map).and_return({\n                                                                     \"module-one\" => [\"module-two\"].to_set,\n                                                                     \"module-two\" => [\"module-three\"].to_set,\n                                                                     \"module-three\" => [\"module-four\"].to_set,\n                                                                     \"module-four\" => Set.new,\n                                                                     \"module-five\" => Set.new,\n                                                                   })\n    end\n\n    it \"should sort the modules based on a topological sort of the dependency map\" do\n      sorted_modules = subject.sort_modules([\"module-one\", \"module-three\", \"module-four\", \"module-two\"])\n      expect(sorted_modules).to eq([\"module-four\", \"module-three\", \"module-two\", \"module-one\"])\n    end\n\n    it \"should sort partial module lists that depend on each other\" do\n      sorted_modules = subject.sort_modules([\"module-two\", \"module-three\"])\n      expect(sorted_modules).to 
eq([\"module-three\", \"module-two\"])\n    end\n\n    it \"should sort partial module lists of one\" do\n      expect(subject.sort_modules([\"module-two\"])).to eq([\"module-two\"])\n    end\n\n    it \"should sort empty module lists\" do\n      expect(subject.sort_modules([])).to eq([])\n    end\n  end\n\n  describe \"#depends_on_map\" do\n    it \"should convert a dependency map to a depends on map\" do\n      allow(subject).to receive(:transitive_dependency_map).and_return({\n                                                                         \"module-one\" => [\"a\", \"b\"].to_set,\n                                                                         \"module-two\" => [\"b\", \"c\", \"module-one\"].to_set,\n                                                                         \"module-three\" => Set.new\n                                                                       })\n\n      depends_on_map = subject.depends_on_map\n\n      expect(depends_on_map[\"module-one\"]).to eq([\"module-one\", \"module-two\"].to_set)\n      expect(depends_on_map[\"module-two\"]).to eq([\"module-two\"].to_set)\n      expect(depends_on_map[\"module-three\"]).to eq([\"module-three\"].to_set)\n      expect(depends_on_map[\"a\"]).to eq([\"a\", \"module-one\"].to_set)\n      expect(depends_on_map[\"b\"]).to eq([\"b\", \"module-one\", \"module-two\"].to_set)\n      expect(depends_on_map[\"c\"]).to eq([\"c\", \"module-two\"].to_set)\n    end\n  end\n\n  context \"with actual files\" do\n    let(:top_level_pom) { <<-POM\n<project>\n  <modules>\n    <module>module-one</module>\n    <module>module-two</module>\n    <module>module-three</module>\n  </modules>\n</project>\n    POM\n    }\n\n    let(:module_one_pom) { <<-POM\n<project>\n  <properties>\n    <deployableBranch>one-branch</deployableBranch>\n  </properties>\n\n  <groupId>com.lobsters</groupId>\n  <artifactId>module-core</artifactId>\n\n  <dependencies>\n    <dependency>\n      <groupId>com.lobsters</groupId>\n      
<artifactId>module-extras</artifactId>\n    </dependency>\n    <dependency>\n      <groupId>junit</groupId>\n      <artifactId>junit</artifactId>\n    </dependency>\n  </dependencies>\n</project>\n    POM\n    }\n\n    let(:module_two_pom) { <<-POM\n<project>\n  <properties>\n    <deployableBranch>two-branch</deployableBranch>\n  </properties>\n\n  <groupId>com.lobsters</groupId>\n  <artifactId>module-extras</artifactId>\n\n  <dependencies>\n    <dependency>\n      <groupId>com.lobsters</groupId>\n      <artifactId>module-three</artifactId>\n    </dependency>\n  </dependencies>\n</project>\n    POM\n    }\n\n    let(:module_three_pom) { <<-POM\n<project>\n  <groupId>com.lobsters</groupId>\n  <artifactId>module-three</artifactId>\n\n  <dependencies>\n    <dependency>\n      <groupId>junit</groupId>\n      <artifactId>junit</artifactId>\n    </dependency>\n  </dependencies>\n</project>\n    POM\n    }\n\n    let(:module_four_pom) { <<-POM\n<project>\n  <properties>\n    <deployableBranch>super-branch</deployableBranch>\n  </properties>\n\n  <groupId>com.lobsters</groupId>\n  <artifactId>module-four</artifactId>\n\n  <dependencies>\n    <dependency>\n      <groupId>com.lobsters</groupId>\n      <artifactId>super-module</artifactId>\n    </dependency>\n  </dependencies>\n</project>\n    POM\n    }\n\n    before do\n      allow(File).to receive(:read).with(Partitioner::Maven::POM_XML).and_return(top_level_pom)\n      allow(File).to receive(:read).with(\"module-one/pom.xml\").and_return(module_one_pom)\n      allow(File).to receive(:read).with(\"module-two/pom.xml\").and_return(module_two_pom)\n      allow(File).to receive(:read).with(\"module-three/pom.xml\").and_return(module_three_pom)\n    end\n\n    describe \"#module_dependency_map\" do\n      it \"it should get the dependencies from a pom\" do\n        dependency_map = subject.module_dependency_map\n\n        expect(dependency_map[\"module-one\"]).to eq([\"module-two\"].to_set)\n        
expect(dependency_map[\"module-two\"]).to eq([\"module-three\"].to_set)\n        expect(dependency_map[\"module-three\"]).to eq(Set.new)\n      end\n\n      context \"with a dependency missing a groupId\" do\n        let(:module_three_pom) { <<-POM\n<project>\n  <groupId>com.lobsters</groupId>\n  <artifactId>module-three</artifactId>\n\n  <dependencies>\n    <dependency>\n      <artifactId>junit</artifactId>\n    </dependency>\n  </dependencies>\n</project>\n        POM\n        }\n\n        it \"should return a useful error message\" do\n          expect { subject.module_dependency_map }.to raise_error(\"dependency in module-three/pom.xml is missing an artifactId or groupId\")\n        end\n      end\n    end\n\n    describe \"#transitive_dependency_map\" do\n      it \"should get the transitive dependencies from a pom\" do\n        dependency_map = subject.transitive_dependency_map\n\n        expect(dependency_map[\"module-one\"]).to eq([\"module-one\", \"module-two\", \"module-three\"].to_set)\n        expect(dependency_map[\"module-two\"]).to eq([\"module-two\", \"module-three\"].to_set)\n        expect(dependency_map[\"module-three\"]).to eq([\"module-three\"].to_set)\n      end\n    end\n  end\n\n  describe \"#transitive_dependencies\" do\n    it \"should return the module in a set as a base case\" do\n      expect(subject.transitive_dependencies(\"module-one\", {\"module-one\" => Set.new})).to eq([\"module-one\"].to_set)\n    end\n\n    it \"should work for the recursive case\" do\n      dependency_map = {\n        \"module-one\" => %w(a b c).to_set,\n        \"a\" => [\"d\"].to_set,\n        \"b\" => [\"d\", \"e\"].to_set,\n        \"c\" => Set.new,\n        \"d\" => [\"e\"].to_set,\n        \"e\" => Set.new,\n        \"f\" => Set.new\n      }\n\n      transitive_map = subject.transitive_dependencies(\"module-one\", dependency_map)\n      expect(transitive_map).to eq([\"module-one\", \"a\", \"b\", \"c\", \"d\", \"e\"].to_set)\n    end\n  end\n\n  
describe \"#file_to_module\" do\n    before do\n      allow(File).to receive(:exist?).and_return(false)\n    end\n\n    it \"should return the module for a src main path\" do\n      allow(File).to receive(:exist?).with(\"oyster/pom.xml\").and_return(true)\n      expect(subject.file_to_module(\"oyster/src/main/java/com/lobsters/oyster/OysterApp.java\")).to eq(\"oyster\")\n    end\n\n    it \"should return the module in a subdirectory\" do\n      allow(File).to receive(:exist?).with(\"gateways/cafis/pom.xml\").and_return(true)\n      expect(subject.file_to_module(\"gateways/cafis/src/main/java/com/lobsters/gateways/cafis/data/DataField_9_6_1.java\"))\n        .to eq(\"gateways/cafis\")\n    end\n\n    it \"should return the module for a src test path even if there is pom in the parent directory\" do\n      allow(File).to receive(:exist?).with(\"integration/hibernate/pom.xml\").and_return(true)\n      allow(File).to receive(:exist?).with(\"integration/hibernate/tests/pom.xml\").and_return(true)\n      expect(subject.file_to_module(\"integration/hibernate/tests/src/test/java/com/lobsters/integration/hibernate/ConfigurationExtTest.java\"))\n        .to eq(\"integration/hibernate/tests\")\n    end\n\n    it \"should return a module for a pom change\" do\n      allow(File).to receive(:exist?).with(\"common/pom.xml\").and_return(true)\n      expect(subject.file_to_module(\"common/pom.xml\")).to eq(\"common\")\n    end\n\n    it \"should return nil for a toplevel change\" do\n      expect(subject.file_to_module(\"pom.xml\")).to be_nil\n      expect(subject.file_to_module(\"Gemfile.lock\")).to be_nil\n      expect(subject.file_to_module(\"non_maven_dependencies/README\")).to be_nil\n    end\n  end\nend\n"
  },
  {
    "path": "spec/lib/partitioner/shared_default_behavior.rb",
    "content": "require 'spec_helper'\nrequire 'partitioner'\n\nRSpec.shared_examples \"Partitioner::Default behavior\" do |partitioner_class|\n  let(:build) { FactoryBot.create(:build) }\n  let(:partitioner) { partitioner_class.new(build, build.kochiku_yml) }\n\n  before do\n    allow(GitRepo).to receive(:load_kochiku_yml).and_return(kochiku_yml)\n    allow(GitRepo).to receive(:inside_copy).and_yield\n  end\n\n  let(:kochiku_yml) do\n    {\n      \"targets\" => [\n        {\n          'type' => 'rspec',\n          'glob' => 'spec/**/*_spec.rb',\n          'workers' => 3,\n          'balance' => rspec_balance,\n          'manifest' => rspec_manifest,\n          'time_manifest' => rspec_time_manifest,\n        },\n        {\n          'type' => 'cuke',\n          'glob' => 'features/**/*.feature',\n          'workers' => 3,\n          'balance' => cuke_balance,\n          'manifest' => cuke_manifest,\n          'time_manifest' => cuke_time_manifest,\n        }\n      ]\n    }\n  end\n\n  let(:rspec_balance) { 'alphabetically' }\n  let(:rspec_manifest) { nil }\n  let(:cuke_balance) { 'alphabetically' }\n  let(:cuke_manifest) { nil }\n  let(:rspec_time_manifest) { nil }\n  let(:cuke_time_manifest) { nil }\n\n  describe '#emails_for_commits_causing_failures' do\n    subject { partitioner.emails_for_commits_causing_failures }\n    it \"should return a hash\" do\n      expect(subject).to be_a(Hash)\n    end\n  end\n\n  describe '#partitions' do\n    subject { partitioner.partitions }\n\n    context \"with a kochiku.yml that does not use Ruby\" do\n      let(:kochiku_yml) do\n        {\n          \"targets\" => [\n            {\n              'type' => 'other',\n              'glob' => 'spec/**/*_spec.rb',\n              'workers' => 1,\n            }\n          ]\n        }\n      end\n\n      it \"should not include a ruby version\" do\n        partitions = subject\n        expect(partitions.size).to be(1)\n        expect(partitions.first[\"type\"]).to eq(\"other\")\n   
     expect(partitions.first[\"files\"]).not_to be_empty\n        expect(partitions.first[\"options\"]).not_to have_key(\"ruby\")\n      end\n    end\n\n    context \"with a ruby-based kochiku.yml\" do\n      let(:queue_override) { nil }\n      let(:retry_count) { nil }\n      let(:kochiku_yml) do\n        {\n          \"ruby\" => [\"ree-1.8.7-2011.12\"],\n          \"targets\" => [\n            {\n              'type' => 'rspec',\n              'glob' => 'spec/**/*_spec.rb',\n              'workers' => 3,\n              'balance' => rspec_balance,\n              'manifest' => rspec_manifest,\n              'queue_override' => queue_override,\n              'retry_count' => retry_count,\n            }\n          ]\n        }\n      end\n\n      it \"parses options from kochiku yml\" do\n        partitions = subject\n        expect(partitions.first[\"options\"][\"ruby\"]).to eq(\"ree-1.8.7-2011.12\")\n        expect(partitions.first[\"type\"]).to eq(\"rspec\")\n        expect(partitions.first[\"files\"]).not_to be_empty\n        expect(partitions.first[\"queue\"]).to eq(\"developer\")\n        expect(partitions.first[\"retry_count\"]).to eq(0)\n        expect(partitions.first['options']).not_to include('log_file_globs')\n      end\n\n      context \"with a master build\" do\n        let(:build) { FactoryBot.create(:convergence_branch_build) }\n\n        it \"should use the ci queue\" do\n          expect(build.branch_record).to be_convergence\n          expect(subject.first[\"queue\"]).to eq(\"ci\")\n        end\n\n        context \"with queue_override\" do\n          let(:queue_override) { \"override\" }\n          it \"should override the queue on the build part\" do\n            expect(subject.first[\"queue\"]).to eq(\"ci-override\")\n          end\n        end\n      end\n\n      context \"with a branch build\" do\n        it \"should use the developer queue\" do\n          expect(build.branch_record).to_not be_convergence\n          
expect(subject.first[\"queue\"]).to eq(\"developer\")\n        end\n\n        context \"with queue_override\" do\n          let(:queue_override) { \"override\" }\n          it \"should override the queue on the build part\" do\n            expect(subject.first[\"queue\"]).to eq(\"developer-override\")\n          end\n        end\n\n        context \"with retry_count\" do\n          let(:retry_count) { 2 }\n          it \"should set the retry count\" do\n            expect(subject.first[\"retry_count\"]).to eq(2)\n          end\n        end\n      end\n    end\n\n    context 'with log_file_globs' do\n      let(:kochiku_yml) do\n        {\n          'log_file_globs' => log_files,\n          'targets' => [\n            {\n              'type' => 'other',\n              'glob' => 'spec/**/*_spec.rb',\n              'workers' => 1,\n            }\n          ]\n        }\n      end\n\n      context 'that uses a single string' do\n        let(:log_files) { 'mylog.log' }\n\n        it 'puts an array into the options' do\n          expect(subject.first['options']['log_file_globs']).to eq(['mylog.log'])\n        end\n      end\n\n      context 'that uses an array' do\n        let(:log_files) { ['mylog.log', 'another.log'] }\n\n        it 'puts the array into the options' do\n          expect(subject.first['options']['log_file_globs']).to eq(['mylog.log', 'another.log'])\n        end\n      end\n    end\n\n    context 'with different log_file_globs specified for different targets' do\n      let(:kochiku_yml) do\n        {\n          'targets' => [\n            {\n              'type' => 'unit',\n              'glob' => 'spec/**/*_spec.rb',\n              'workers' => 1,\n              'log_file_globs' => \"log1\",\n            },\n            {\n              'type' => 'other',\n              'glob' => 'spec/**/*_spec.rb',\n              'workers' => 1,\n              'log_file_globs' => \"log2\"\n            }\n          ]\n        }\n      end\n\n      it \"should parse 
log_file_globs properly\" do\n        expect(subject.first['options']['log_file_globs']).to eq(['log1'])\n        expect(subject.second['options']['log_file_globs']).to eq(['log2'])\n      end\n    end\n\n    context 'when the glob matches' do\n      before { allow(Dir).to receive(:[]).and_return(matches) }\n\n      context 'no files' do\n        let(:matches) { [] }\n        it 'does nothing' do\n          expect(subject).to eq([])\n        end\n      end\n\n      context 'one file' do\n        let(:matches) { %w(a) }\n        it 'makes one partition' do\n          expect(subject).to include(a_hash_including({ 'files' => %w(a) }))\n        end\n      end\n\n      context 'multiple files' do\n        let(:matches) { %w(a b c d) }\n\n        # :rspec_balance set to alphabetically above\n        it 'using alphabetically' do\n          partitions = subject\n          expect(partitions).to include(a_hash_including({ 'files' => %w(a b) }))\n          expect(partitions).to include(a_hash_including({ 'files' => %w(c) }))\n          expect(partitions).to include(a_hash_including({ 'files' => %w(d) }))\n        end\n\n        context 'and balance is round_robin' do\n          let(:rspec_balance) { 'round_robin' }\n\n          it 'uses round_robin' do\n            partitions = subject\n            expect(partitions).to include(a_hash_including({ 'files' => %w(a d) }))\n            expect(partitions).to include(a_hash_including({ 'files' => %w(b) }))\n            expect(partitions).to include(a_hash_including({ 'files' => %w(c) }))\n          end\n\n          context 'and a manifest file is specified' do\n            before { allow(YAML).to receive(:load_file).with(rspec_manifest).and_return(%w(c b a)) }\n            let(:rspec_manifest) { 'manifest.yml' }\n            let(:matches) { %w(a b c d) }\n\n            it 'uses the manifest' do\n              partitions = subject\n              expect(partitions).to include(a_hash_including({ 'files' => %w(c d) }))\n              
expect(partitions).to include(a_hash_including({ 'files' => %w(a) }))\n              expect(partitions).to include(a_hash_including({ 'files' => %w(b) }))\n            end\n          end\n\n          context 'and time manifest files are specified' do\n            before do\n              allow(YAML).to receive(:load_file).with(rspec_time_manifest).and_return(\n                {\n                  'a.spec' => [2],\n                  'b.spec' => [5, 8],\n                  'c.spec' => [9, 6],\n                  'd.spec' => [5, 8],\n                  'deleted.spec' => [10],\n                }\n              )\n              allow(YAML).to receive(:load_file).with(cuke_time_manifest).and_return(\n                {\n                  'f.feature' => [2],\n                  'g.feature' => [5, 8],\n                  'h.feature' => [6, 9],\n                  'i.feature' => [15, 16],\n                }\n              )\n              allow(Dir).to receive(:[]).with(\"spec/**/*_spec.rb\").and_return(spec_matches)\n              allow(Dir).to receive(:[]).with(\"features/**/*.feature\").and_return(feature_matches)\n            end\n\n            let(:rspec_time_manifest) { 'rspec_time_manifest.yml' }\n            let(:cuke_time_manifest) { 'cuke_time_manifest.yml' }\n            let(:spec_matches) { %w(a.spec b.spec c.spec d.spec e.spec) }\n            let(:feature_matches) { %w(f.feature g.feature h.feature i.feature) }\n\n            it 'should greedily partition files in the time_manifest, and round robin the remaining files' do\n              partitions = subject\n              expect(partitions).to include(a_hash_including({ 'type' => 'rspec', 'files' => ['c.spec', 'e.spec'] }))\n              expect(partitions).to include(a_hash_including({ 'type' => 'rspec', 'files' => ['d.spec', 'a.spec'] }))\n              expect(partitions).to include(a_hash_including({ 'type' => 'rspec', 'files' => ['b.spec'] }))\n              expect(partitions).to include(a_hash_including({ 'type' 
=> 'cuke', 'files' => ['i.feature'] }))\n              expect(partitions).to include(a_hash_including({ 'type' => 'cuke', 'files' => ['h.feature'] }))\n              expect(partitions).to include(a_hash_including({ 'type' => 'cuke', 'files' => ['g.feature', 'f.feature']}))\n            end\n          end\n        end\n\n        context 'and balance is size' do\n          let(:rspec_balance) { 'size' }\n\n          before do\n            allow(File).to receive(:size).with('a').and_return(1)\n            allow(File).to receive(:size).with('b').and_return(1000)\n            allow(File).to receive(:size).with('c').and_return(100)\n            allow(File).to receive(:size).with('d').and_return(10)\n          end\n\n          it 'uses size' do\n            partitions = subject\n            expect(partitions).to include(a_hash_including({ 'files' => %w(b a) }))\n            expect(partitions).to include(a_hash_including({ 'files' => %w(c) }))\n            expect(partitions).to include(a_hash_including({ 'files' => %w(d) }))\n          end\n        end\n\n        context 'and balance is size_greedy_partitioning' do\n          let(:rspec_balance) { 'size_greedy_partitioning' }\n\n          before do\n            allow(File).to receive(:size).with('a').and_return(1)\n            allow(File).to receive(:size).with('b').and_return(1000)\n            allow(File).to receive(:size).with('c').and_return(100)\n            allow(File).to receive(:size).with('d').and_return(10)\n          end\n\n          it 'uses greedy_size' do\n            partitions = subject\n            expect(partitions).to include(a_hash_including({ 'files' => %w(b) }))\n            expect(partitions).to include(a_hash_including({ 'files' => %w(c) }))\n            expect(partitions).to include(a_hash_including({ 'files' => %w(d a) }))\n          end\n        end\n\n        context 'and balance is size_average_partitioning' do\n          let(:rspec_balance) { 'size_average_partitioning' }\n\n          before 
do\n            allow(File).to receive(:size).with('a').and_return(1)\n            allow(File).to receive(:size).with('b').and_return(1000)\n            allow(File).to receive(:size).with('c').and_return(100)\n            allow(File).to receive(:size).with('d').and_return(10)\n          end\n\n          it 'uses size_average' do\n            partitions = subject\n            expect(partitions).to include(a_hash_including({ 'files' => %w(a b) }))\n            expect(partitions).to include(a_hash_including({ 'files' => %w(c) }))\n            expect(partitions).to include(a_hash_including({ 'files' => %w(d) }))\n          end\n        end\n\n        context 'and balance is isolated' do\n          let(:rspec_balance) { 'isolated' }\n\n          it 'isolates files' do\n            partitions = subject\n            expect(partitions).to include(a_hash_including({ 'files' => %w(a) }))\n            expect(partitions).to include(a_hash_including({ 'files' => %w(b) }))\n            expect(partitions).to include(a_hash_including({ 'files' => %w(c) }))\n            expect(partitions).to include(a_hash_including({ 'files' => %w(d) }))\n          end\n        end\n      end\n    end\n\n    context 'when target is specified' do\n      let(:kochiku_yml) do\n        {\n          'targets' => [\n            {\n              'type' => 'instrumentation',\n              'target' => 'util:libraryA'\n            },\n            {\n              'type' => 'instrumentation',\n              'target' => ['util:libraryB', 'util:libraryC']\n            }\n          ]\n        }\n      end\n\n      it 'accepts both strings and arrays' do\n        expect(subject.size).to eq(2)\n        expect(subject[0]['files']).to eq(['util:libraryA'])\n        expect(subject[1]['files']).to eq(['util:libraryB', 'util:libraryC'])\n      end\n\n      context \"when workers and/or glob are also specified\" do\n        let(:kochiku_yml) do\n          {\n            'targets' => [\n              {\n                
'type' => 'instrumentation',\n                'target' => 'util:libraryA',\n                'workers' => 10,\n                'glob' => 'spec/**/*_spec.rb'\n              }\n            ]\n          }\n        end\n\n        it 'ignores them' do\n          expect(subject.size).to eq(1)\n          expect(subject[0]['files']).to eq(['util:libraryA'])\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/lib/partitioner_spec.rb",
    "content": "require 'spec_helper'\nrequire 'partitioner'\n\ndescribe Partitioner do\n  let(:build) { FactoryBot.create(:build, ref: to_40('1')) }\n\n  before do\n    allow(GitRepo).to receive(:load_kochiku_yml).and_return(kochiku_yml)\n    allow(GitRepo).to receive(:inside_repo).and_yield\n    allow(GitRepo).to receive(:inside_copy).and_yield\n  end\n\n  describe \"#for_build\" do\n    subject { Partitioner.for_build(build) }\n\n    context \"when there is no kochiku.yml\" do\n      let(:kochiku_yml) { nil }\n\n      it \"should return a single partiion\" do\n        partitions = subject.partitions\n        expect(partitions.size).to eq(1)\n        expect(partitions.first[\"type\"]).to eq(\"test\")\n        expect(partitions.first[\"files\"]).not_to be_empty\n      end\n    end\n\n    context \"when there is a kochiku.yml\" do\n      context \"when no partitioner is specified\" do\n        let(:kochiku_yml) do\n          {\n            \"ruby\" => [\"ree-1.8.7-2011.12\"],\n            \"targets\" => [\n              {\n                'type' => 'rspec',\n                'glob' => 'spec/**/*_spec.rb',\n                'workers' => 3,\n              }\n            ]\n          }\n        end\n\n        it \"parses options from kochiku yml\" do\n          allow(Time).to receive(:now).and_return(Time.new(1977, 3, 10, 5, 30, 0).utc)\n          expect(Rails.logger).to receive(:info).with(\"Partition finished: [DEFAULT] 0.0 1111111111111111111111111111111111111111\")\n          partitions = subject.partitions\n          expect(partitions.first[\"options\"][\"ruby\"]).to eq(\"ree-1.8.7-2011.12\")\n          expect(partitions.first[\"type\"]).to eq(\"rspec\")\n          expect(partitions.first[\"files\"]).not_to be_empty\n          expect(partitions.first[\"queue\"]).to eq(\"developer\")\n          expect(partitions.first[\"retry_count\"]).to eq(0)\n        end\n      end\n\n      context \"when using the maven partitioner\" do\n        let(:kochiku_yml) { 
{'partitioner' => 'maven'} }\n\n        it \"should call the maven partitioner\" do\n          allow(Time).to receive(:now).and_return(Time.new(1977, 3, 10, 5, 30, 0).utc)\n          expect(Rails.logger).to receive(:info).with(\"Partition finished: [maven] 0.0 1111111111111111111111111111111111111111\")\n          expect(subject).to be_a(Partitioner::Maven)\n        end\n      end\n\n      context \"when using the dependency_map partitioner\" do\n        let(:kochiku_yml) { {'partitioner' => 'dependency_map'} }\n\n        it \"should call the dependency_map partitioner\" do\n          allow(Time).to receive(:now).and_return(Time.new(1977, 3, 10, 5, 30, 0).utc)\n          expect(Rails.logger).to receive(:info).with(\"Partition finished: [dependency_map] 0.0 1111111111111111111111111111111111111111\")\n          expect(subject).to be_a(Partitioner::DependencyMap)\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/lib/remote_server/github_spec.rb",
    "content": "require 'spec_helper'\nrequire 'remote_server'\nrequire 'remote_server/github'\n\ndescribe RemoteServer::Github do\n  def make_server(url)\n    described_class.new(url, Settings.git_server(url))\n  end\n\n  describe \"base_api_url\" do\n    describe \"for github.com\" do\n      it \"should use the api subdomain\" do\n        url = \"git@github.com:square/kochiku.git\"\n        expect(make_server(url).base_api_url).to eq(\"https://api.github.com/repos/square/kochiku\")\n      end\n    end\n\n    describe \"for github enterprise\" do\n      it \"should use the api path prefix\" do\n        url = \"git@git.example.com:square/kochiku.git\"\n        expect(make_server(url).base_api_url).to eq(\"https://git.example.com/api/v3/repos/square/kochiku\")\n      end\n    end\n  end\n\n  describe '#attributes' do\n    it 'raises UnknownUrlFormat for invalid urls' do\n      expect {\n        make_server(\"https://github.com/blah\")\n      }.to raise_error(RemoteServer::UnknownUrlFormat)\n\n      expect {\n        make_server(\"github.com/asdf\")\n      }.to raise_error(RemoteServer::UnknownUrlFormat)\n    end\n\n    it 'parses ssh URLs' do\n      result = make_server(\"git@github.com:who/myrepo.git\")\n\n      expect(result.attributes).to eq(\n        host:                 'github.com',\n        repository_namespace: 'who',\n        repository_name:      'myrepo',\n        possible_hosts:       ['github.com']\n      )\n    end\n\n    it 'parses git:// URLs' do\n      result = make_server(\"git://github.com/who/myrepo.git\")\n\n      expect(result.attributes).to eq(\n        host:                 'github.com',\n        repository_namespace: 'who',\n        repository_name:      'myrepo',\n        possible_hosts:       ['github.com']\n      )\n    end\n\n    it 'parses HTTPS URLs' do\n      result = make_server(\"https://git.example.com/who/myrepo.git\")\n\n      expect(result.attributes).to eq(\n        host:                 'git.example.com',\n        
repository_namespace: 'who',\n        repository_name:      'myrepo',\n        possible_hosts:       ['git.example.com']\n      )\n    end\n\n    it 'should allow periods, hyphens, and underscores in repository names' do\n      result = make_server(\"git@github.com:angular/an-gu_lar.js.git\")\n      expect(result.attributes[:repository_name]).to eq('an-gu_lar.js')\n\n      result = make_server(\"git://github.com/angular/an-gu_lar.js.git\")\n      expect(result.attributes[:repository_name]).to eq('an-gu_lar.js')\n\n      result = make_server(\"https://github.com/angular/an-gu_lar.js.git\")\n      expect(result.attributes[:repository_name]).to eq('an-gu_lar.js')\n    end\n\n    it 'should not allow characters disallowed by Github in repository names' do\n      %w(! @ # $ % ^ & * ( ) = + \\ | ` ~ [ ] { } : ; ' \" ? /).each do |symbol|\n        expect {\n          make_server(\"git@github.com:angular/bad#{symbol}name.git\")\n        }.to raise_error(RemoteServer::UnknownUrlFormat)\n\n        expect {\n          make_server(\"git://github.com/angular/bad#{symbol}name.git\")\n        }.to raise_error(RemoteServer::UnknownUrlFormat)\n\n        expect {\n          make_server(\"https://github.com/angular/bad#{symbol}name.git\")\n        }.to raise_error(RemoteServer::UnknownUrlFormat)\n      end\n    end\n  end\n\n  describe '#canonical_repository_url' do\n    it 'should return a ssh url when given a https url' do\n      https_url = \"https://github.com/square/test-repo1.git\"\n      result = make_server(https_url).canonical_repository_url\n      expect(result).to eq(\"git@github.com:square/test-repo1.git\")\n    end\n\n    it 'should do nothing when given a ssh url' do\n      ssh_url = \"git@github.com:square/test-repo1.git\"\n      result = make_server(ssh_url).canonical_repository_url\n      expect(result).to eq(ssh_url)\n    end\n  end\n\n  describe '#get_branch_url' do\n    it 'should return the expected url' do\n      https_url = 
\"https://github.com/square/test-repo1.git\"\n      result = make_server(https_url).get_branch_url('my-new-branch')\n      expect(result).to eq(\"https://github.com/square/test-repo1/tree/my-new-branch\")\n    end\n  end\n\n  describe '#open_pull_request_url' do\n    it 'should return the expected url' do\n      https_url = \"https://github.com/square/test-repo1.git\"\n      result = make_server(https_url).open_pull_request_url('my-new-branch')\n      expect(result).to eq(\"https://github.com/square/test-repo1/pull/new/master...my-new-branch\")\n    end\n  end\nend\n"
  },
  {
    "path": "spec/lib/remote_server/stash_spec.rb",
    "content": "require 'spec_helper'\nrequire 'remote_server'\nrequire 'remote_server/stash'\n\ndescribe 'stash integration test' do\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      stash.example.com:\n        type: stash\n        username: stashuser\n        password_file: /password\n    YAML\n    stub_const \"Settings\", settings\n\n    allow(File).to receive(:read).with(\"/password\").and_return(\"stashpassword\")\n  end\n\n  let(:url) { 'https://stash.example.com/scm/foo/bar.git' }\n  let(:stash) { RemoteServer::Stash.new(url, Settings.git_server(url)) }\n  let(:stash_request) { stash.stash_request }\n\n  describe \".setup_auth!\" do\n    it \"should send username and password on\" do\n      request = double\n      expect(request).to receive(:basic_auth).with(\"stashuser\", \"stashpassword\")\n      stash_request.setup_auth!(request)\n    end\n  end\n\n  describe \"#update_commit_status!\" do\n    let(:build) {\n      double('build',\n             ref:        'abc123',\n             repository: double('repository', to_param: 'my_namespace/my_repo_name'),\n             succeeded?: true,\n             id:         123)\n    }\n\n    it \"should post to stash\" do\n      stub_request(:post, \"https://@stash.example.com/rest/build-status/1.0/commits/#{build.ref}\")\n        .with(basic_auth: ['stashuser', 'stashpassword'])\n\n      stash.update_commit_status!(build)\n\n      expect(WebMock).to have_requested(:post, \"https://stash.example.com/rest/build-status/1.0/commits/#{build.ref}\")\n    end\n  end\nend\n\ndescribe RemoteServer::Stash do\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      stash.example.com:\n        type: stash\n        aliases:\n          - git-alias.example.com\n    YAML\n    stub_const \"Settings\", settings\n  end\n\n  def make_server(url)\n    described_class.new(url, Settings.git_server(url))\n  end\n\n  describe '#attributes' do\n    it 'parses HTTPS url' do\n      
result = make_server \\\n        \"https://stash.example.com/scm/myproject/myrepo.git\"\n\n      expect(result.attributes).to include(\n        host:                 'stash.example.com',\n        repository_namespace: 'myproject',\n        repository_name:      'myrepo'\n      )\n    end\n\n    it 'does not support HTTP auth credentials in URL' do\n      # Use a netrc file instead.\n      expect {\n        make_server \\\n          \"https://don@stash.example.com/scm/myproject/myrepo.git\"\n      }.to raise_error(RemoteServer::UnknownUrlFormat)\n    end\n\n    it 'parses ssh URLs' do\n      result = make_server \\\n        \"git@stash.example.com:myproject/myrepo.git\"\n\n      expect(result.attributes).to include(\n        host:                 'stash.example.com',\n        repository_namespace: 'myproject',\n        repository_name:      'myrepo'\n      )\n    end\n\n    it 'parses ssh URLs prefixed with ssh://' do\n      result = make_server \\\n        \"ssh://git@stash.example.com/myproject/myrepo.git\"\n\n      expect(result.attributes).to include(\n        host:                 'stash.example.com',\n        repository_namespace: 'myproject',\n        repository_name:      'myrepo'\n      )\n    end\n\n    it 'parses ssh URLs with an explicit port' do\n      result = make_server \\\n        \"ssh://git@stash.example.com:7999/myproject/myrepo.git\"\n\n      expect(result.attributes).to include(\n        host:                 'stash.example.com',\n        repository_namespace: 'myproject',\n        repository_name:      'myrepo',\n        port:                 '7999'\n      )\n    end\n\n    it 'should allow periods, hyphens, and underscores in repository names' do\n      result = make_server(\"git@stash.example.com:angular/an-gu_lar.js.git\")\n      expect(result.attributes[:repository_name]).to eq('an-gu_lar.js')\n\n      result = make_server(\"ssh://git@stash.example.com/angular/an-gu_lar.js.git\")\n      expect(result.attributes[:repository_name]).to 
eq('an-gu_lar.js')\n\n      result = make_server(\"https://stash.example.com/scm/angular/an-gu_lar.js.git\")\n      expect(result.attributes[:repository_name]).to eq('an-gu_lar.js')\n    end\n\n    it 'should not allow characters disallowed by Github in repository names' do\n      %w(! @ # $ % ^ & * ( ) = + \\ | ` ~ [ ] { } : ; ' \" ? /).each do |symbol|\n        expect {\n          make_server(\"git@stash.example.com:angular/bad#{symbol}name.git\")\n        }.to raise_error(RemoteServer::UnknownUrlFormat)\n\n        expect {\n          make_server(\"ssh://git@stash.example.com/angular/bad#{symbol}name.git\")\n        }.to raise_error(RemoteServer::UnknownUrlFormat)\n\n        expect {\n          make_server(\"https://stash.example.com/scm/angular/bad#{symbol}name.git\")\n        }.to raise_error(RemoteServer::UnknownUrlFormat)\n      end\n    end\n  end\n\n  describe \"#canonical_repository_url\" do\n    it 'should return a https url when given a ssh url' do\n      ssh_url = \"ssh://git@stash.example.com:7999/foo/bar.git\"\n      result = make_server(ssh_url).canonical_repository_url\n      expect(result).to eq(\"https://stash.example.com/scm/foo/bar.git\")\n    end\n\n    it 'should do nothing when given a https url' do\n      https_url = \"https://stash.example.com/scm/foo/bar.git\"\n      result = make_server(https_url).canonical_repository_url\n      expect(result).to eq(https_url)\n    end\n  end\n\n  describe \"#base_api_url\" do\n    it 'should use the primary host name' do\n      https_url = \"https://git-alias.example.com/scm/foo/bar.git\"\n      result = make_server(https_url).base_api_url\n      expect(result).to eq('https://stash.example.com/rest/api/1.0/projects/foo/repos/bar')\n    end\n  end\n\n  describe \"#base_html_url\" do\n    it 'should use the primary host name' do\n      https_url = \"https://git-alias.example.com/scm/foo/bar.git\"\n      result = make_server(https_url).base_html_url\n      expect(result).to 
eq('https://stash.example.com/projects/FOO/repos/bar')\n    end\n  end\n\n  describe \"#merge\" do\n    it 'uses stash API' do\n      https_url = \"https://stash.example.com/scm/foo/bar.git\"\n      server = make_server(https_url)\n\n      allow(server).to receive(:get_pr_id_and_version).and_return([1, 5])\n      allow(server).to receive(:can_merge?).and_return(true)\n      allow(server).to receive(:perform_merge).and_return(true)\n\n      expect(server).to receive(:get_pr_id_and_version).once\n      expect(server).to receive(:can_merge?).once\n      expect(server).to receive(:perform_merge).once\n\n      expect { server.merge(\"abranch\") }.to_not raise_error\n    end\n  end\n\n  describe '#get_branch_url' do\n    it 'should return the expected url' do\n      https_url = \"https://stash.example.com/scm/foo/bar.git\"\n      result = make_server(https_url).get_branch_url('my-new-branch')\n      expect(result).to eq(\"https://stash.example.com/projects/FOO/repos/bar?at=refs/heads/my-new-branch\")\n    end\n  end\n\n  describe '#open_pull_request_url' do\n    it 'should return the expected url' do\n      https_url = \"https://stash.example.com/scm/foo/bar.git\"\n      result = make_server(https_url).open_pull_request_url('my-new-branch')\n      expect(result).to eq(\"https://stash.example.com/projects/FOO/repos/bar/compare/commits?sourceBranch=refs/heads/my-new-branch\")\n    end\n  end\n\n  describe \"#head_commit\" do\n    let(:https_url) { \"https://stash.example.com/scm/foo/bar.git\" }\n    let(:server) { make_server(https_url) }\n    let(:stash_request) { server.stash_request }\n\n    it \"should not raise errors\" do\n      allow(server).to receive(:get_pr_id_and_version).and_return([1, 5])\n      allow(stash_request).to receive(:get).and_return({\"values\" => [{\"id\" => \"3\" * 40}]}.to_json)\n\n      expect(server).to receive(:get_pr_id_and_version).once\n      expect { server.head_commit(\"a/branch\") }.to_not raise_error\n    end\n  end\nend\n"
  },
  {
    "path": "spec/lib/remote_server_spec.rb",
    "content": "require 'spec_helper'\nrequire 'remote_server'\nrequire 'remote_server/github'\nrequire 'remote_server/stash'\n\nshared_examples_for 'a remote server' do\n  describe \"#sha_for_branch\" do\n    let(:url) { good_url }\n    let(:repo_uri) { remote_server.base_api_url }\n    let(:branch) { \"test/branch\" }\n    let(:branch_head_sha) { \"4b41fe773057b2f1e2063eb94814d32699a34541\" }\n\n    let(:subject) { remote_server.sha_for_branch(branch) }\n\n    it \"returns the HEAD SHA for the branch\" do\n      expect(subject).to eq(branch_head_sha)\n    end\n\n    context \"with a non-existent repo\" do\n      let(:url) { bad_url }\n\n      before do\n        stub_request(:get, \"#{repo_uri}/git/refs/heads/#{branch}\").to_return(:status => 404, :body => '{ \"message\": \"Not Found\" }')\n        stub_request(:get, \"https://stash.example.com/rest/api/1.0/projects/sq/repos/non-existent-repo/commits?limit=1&until=#{branch}\")\n          .with(basic_auth: ['stashuser', 'stashpassword'])\n          .to_return(:status => 404, :body => '{ \"errors\": [ { \"context\": null, \"message\": \"A detailed error message.\", \"exceptionName\": null } ] }')\n      end\n\n      it \"raises RepositoryDoesNotExist\" do\n        expect{ subject }.to raise_error(RemoteServer::RefDoesNotExist)\n      end\n    end\n\n    context \"with a non-existent branch\" do\n      let(:branch) { \"nonexistant-branch\" }\n\n      before do\n        stub_request(:get, \"#{repo_uri}/git/refs/heads/#{branch}\").to_return(:status => 404, :body => '{ \"message\": \"Not Found\" }')\n        stub_request(:get, \"https://@stash.example.com/rest/api/1.0/projects/sq/repos/kochiku/commits?limit=1&until=#{branch}\")\n          .with(basic_auth: ['stashuser', 'stashpassword'])\n          .to_return(:status => 400, :body => '{ \"errors\": [ { \"context\": null, \"message\": \"A detailed error message.\", \"exceptionName\": null } ] }')\n      end\n\n      it \"raises BranchDoesNotExist\" do\n        expect{ 
subject }.to raise_error(RemoteServer::RefDoesNotExist)\n      end\n    end\n  end\nend\n\ndescribe 'RemoteServer::GitHub' do\n  let(:good_url) { 'git@git.example.com:square/kochiku.git' }\n  let(:bad_url) { 'git@git.example.com:square/non-existent-repo.git' }\n\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n      git_servers:\n        git.example.com:\n          type: github\n    YAML\n    stub_const \"Settings\", settings\n\n    build_ref_info = <<-RESPONSE\n      {\n        \"ref\": \"refs/heads/#{branch}\",\n        \"url\": \"#{repo_uri}/git/refs/heads/#{branch}\",\n        \"object\": {\n          \"sha\": \"#{branch_head_sha}\",\n          \"type\": \"commit\",\n          \"url\": \"#{repo_uri}/git/commits/#{branch_head_sha}\"\n        }\n      }\n    RESPONSE\n\n    stub_request(:get, \"#{repo_uri}/git/refs/heads/#{branch}\").to_return(:status => 200, :body => build_ref_info)\n  end\n\n  it_behaves_like 'a remote server' do\n    let(:remote_server) { RemoteServer::Github.new(url, Settings.git_server(url)) }\n  end\nend\n\ndescribe 'RemoteServer::Stash' do\n  let(:good_url) { 'ssh://git@stash.example.com/sq/kochiku.git' }\n  let(:bad_url) { 'ssh://git@stash.example.com/sq/non-existent-repo.git' }\n\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      stash.example.com:\n        type: stash\n        username: stashuser\n        password_file: /password\n    YAML\n    stub_const \"Settings\", settings\n\n    build_ref_info = <<-RESPONSE\n    {\n        \"size\": 3,\n        \"limit\": 3,\n        \"isLastPage\": false,\n        \"values\": [\n        {\n            \"id\": \"#{branch_head_sha}\"\n        }\n        ],\n        \"start\": 0,\n        \"filter\": null,\n        \"nextPageStart\": 3\n    }\n    RESPONSE\n\n    allow(File).to receive(:read).with(\"/password\").and_return(\"stashpassword\")\n\n    stub_request(:get, 
\"https://stash.example.com/rest/api/1.0/projects/sq/repos/kochiku/commits?limit=1&until=#{branch}\")\n      .with(basic_auth: ['stashuser', 'stashpassword'])\n      .to_return(:status => 200, :body => build_ref_info)\n  end\n\n  it_behaves_like 'a remote server' do\n    let(:remote_server) { RemoteServer::Stash.new(url, Settings.git_server(url)) }\n  end\nend\n\ndescribe 'valid_git_host?' do\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      git.example.com:\n        type: github\n    YAML\n    stub_const \"Settings\", settings\n  end\n\n  it 'returns true for known git hosts' do\n    known_git_host = 'git.example.com'\n    expect(RemoteServer.valid_git_host?(known_git_host)).to be_truthy\n  end\n\n  it 'returns false for unknown git hosts' do\n    unknown_git_host = 'example.com'\n    expect(RemoteServer.valid_git_host?(unknown_git_host)).to be_falsey\n  end\nend\n"
  },
  {
    "path": "spec/lib/server_settings_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe ServerSettings do\n\n  it \"should be able to access the common settings\" do\n    options = {\n      :type => 'github',\n      :mirror => 'git://git-mirror.example.com/',\n      :aliases => ['alias1.example.com', 'alias2.example.com'],\n    }\n    settings = ServerSettings.new(options, 'git.example.com')\n\n    expect(settings.host).to eq('git.example.com')\n    expect(settings.type).to eq('github')\n    expect(settings.mirror).to eq('git://git-mirror.example.com/')\n    expect(settings.aliases).to eq(['alias1.example.com', 'alias2.example.com'])\n  end\n\n  context \"github settings\" do\n    describe \"oauth token\" do\n      it \"should read the file and expose the token\" do\n        allow(File).to receive(:read).with('/secrets/github_oauth_token').and_return(\"oauth_token_for_test\\n\")\n\n        settings = ServerSettings.new({ oauth_token_file: '/secrets/github_oauth_token' }, 'git.example.com')\n\n        expect(settings.oauth_token)\n          .to eq('oauth_token_for_test')\n      end\n    end\n  end\n\n  context \"stash settings\" do\n    it 'should work' do\n      allow(File).to receive(:read).with('/secrets/stash').and_return(\"some_password\\n\")\n\n      options = {\n        :type => 'stash',\n        :username => 'kochiku',\n        :password_file => '/secrets/stash',\n      }\n      settings = ServerSettings.new(options, 'stash.example.com')\n\n      expect(settings.type).to eq('stash')\n      expect(settings.stash_username).to eq('kochiku')\n      expect(settings.stash_password).to eq('some_password')\n    end\n\n    describe \"stash password file\" do\n      before do\n        File.open(File.join(RSpec.configuration.fixture_path, \"stash-pass.txt\"), 'w') { |f| f.write(\"fake-stash-password\") }\n      end\n      after do\n        File.unlink(File.join(RSpec.configuration.fixture_path, \"stash-pass.txt\"))\n      end\n\n      it 'will work with a relative path' do\n        settings = 
ServerSettings.new({ password_file: 'spec/fixtures/stash-pass.txt' }, 'stash.example.com')\n        expect(settings.stash_password).to eq(\"fake-stash-password\")\n      end\n    end\n  end\n\nend\n"
  },
  {
    "path": "spec/lib/settings_accessor_spec.rb",
    "content": "require 'settings_accessor'\n\ndescribe SettingsAccessor do\n  describe 'kochiku_protocol' do\n    it 'returns https when use_https is truthy' do\n      settings = SettingsAccessor.new(\"use_https: true\")\n      expect(settings.kochiku_protocol).to eq(\"https\")\n    end\n\n    it 'returns https when use_https is false' do\n      settings = SettingsAccessor.new(\"use_https: false\")\n      expect(settings.kochiku_protocol).to eq(\"http\")\n    end\n\n    it 'returns https when use_https is not present' do\n      settings = SettingsAccessor.new(\"blah: blah\")\n      expect(settings.kochiku_protocol).to eq(\"http\")\n    end\n  end\n\n  it \"should support multiple git servers\" do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      stash.example.com:\n        type: stash\n        username: robot\n      github.com:\n        type: github\n      github-enterprise.example.com:\n        type: github\n        mirror: 'git://git-mirror.example.com/'\n    YAML\n    expect(settings.git_servers.keys)\n      .to match_array(%w{stash.example.com github.com github-enterprise.example.com})\n    expect(settings.git_servers['stash.example.com'].type). 
to eq('stash')\n    expect(settings.git_servers['github-enterprise.example.com'].type).to eq('github')\n  end\n\n  it \"can look up git servers\" do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      stash.example.com:\n        type: stash\n      github.com:\n        type: github\n    YAML\n    expect(settings.git_server('git@stash.example.com:square/kochiku.git').type).to eq('stash')\n    expect(settings.git_server('https://github.com/square/kochiku.git').type).to eq('github')\n    expect(settings.git_server('https://foobar.com/square/kochiku.git')).to eq(nil)\n  end\n\n  it \"can look up git servers via alias\" do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      stash.example.com:\n        type: stash\n        aliases:\n          - stash-alias.example.com\n          - other-stash-alias.example.com\n      github.com:\n        type: github\n    YAML\n    expect(settings.git_server('git@stash-alias.example.com:square/kochiku.git').host).to eq('stash.example.com')\n    expect(settings.git_server('git@other-stash-alias.example.com:square/kochiku.git').host).to eq('stash.example.com')\n    expect(settings.git_server('git@not-an-alias.example.com:square/kochiku.git')).to eq(nil)\n  end\n\n  it \"can also give me the host which matched\" do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      github.com:\n        type: github\n    YAML\n    expect(settings.git_server('https://github.com/square/kochiku.git').host).to eq('github.com')\n  end\n\n  it \"still works if git_servers is not in the config file\" do\n    settings = SettingsAccessor.new(\"another_setting:\\n\")\n    expect(settings.git_server('https://github.com/square/kochiku.git')).to eq(nil)\n  end\n\n  it \"still works if a host is listed without any data\" do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      git.example.com:\n    YAML\n    expect(settings.git_server('git@git.example.com:square/kochiku.git').type).to eq(nil)\n  
end\nend\n"
  },
  {
    "path": "spec/lib/stash_merge_executor_spec.rb",
    "content": "require 'spec_helper'\nrequire 'stash_merge_executor'\n\ndescribe StashMergeExecutor do\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      github.com:\n        type: github\n      stash.example.com:\n        type: stash\n    YAML\n    stub_const \"Settings\", settings\n  end\n\n  let(:repository) { FactoryBot.create(:stash_repository) }\n  let(:branch) { FactoryBot.create(:branch, repository: repository, name: 'funyuns') }\n  let(:stash_build) { FactoryBot.create(:build, branch_record: branch) }\n  let(:stash_merger) { described_class.new(stash_build) }\n\n  context \"Using stash repository\" do\n    subject { stash_merger.merge_and_push }\n\n    before do\n      allow_any_instance_of(RemoteServer::Stash).to receive(:head_commit).and_return([1, 5])\n    end\n\n    it \"should use stash REST api\" do\n      expect(stash_build.repository.remote_server).to receive(:merge).once\n      allow(stash_build.repository.remote_server).to receive(:merge).and_return(true)\n\n      expect(subject).to eq(merge_commit: [1, 5], log_output: \"Successfully merged funyuns\")\n    end\n\n    it \"should use throw exception if stash api refuses merge\" do\n      expect(stash_build.repository.remote_server).to receive(:merge).once\n      allow(stash_build.repository.remote_server).to receive(:merge).and_return(false)\n\n      expect { subject }.to raise_error(StashMergeExecutor::GitMergeFailedError)\n    end\n\n    context \"for a build on a convergence branch\" do\n      let(:branch) { FactoryBot.create(:convergence_branch, repository: repository) }\n\n      it \"should raise an exception\" do\n        expect { subject }.to raise_error(/ineligible for merge/)\n      end\n    end\n  end\n\n  describe \"#delete\" do\n    context \"Using stash repository\" do\n      it \"should use stash REST api\" do\n        expect(stash_build.repository.remote_server).to receive(:delete_branch).once\n        allow(stash_build.repository.remote_server).to 
receive(:delete_branch).and_return(true)\n\n        expect { stash_merger.delete_branch }.to_not raise_error\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/mailers/build_mailer_spec.rb",
    "content": "require 'spec_helper'\nrequire 'partitioner'\n\ndescribe BuildMailer do\n\n  describe \"#error_email\" do\n    before do\n      allow(Settings).to receive(:sender_email_address).and_return('kochiku@example.com')\n      allow(Settings).to receive(:kochiku_notifications_email_address).and_return('notify@example.com')\n    end\n\n    it \"sends the email\" do\n      build_attempt = FactoryBot.build(:build_attempt, :state => 'errored', :builder => \"test-builder\")\n\n      email = BuildMailer.error_email(build_attempt, \"error text\")\n\n      expect(email.to).to include('notify@example.com')\n\n      expect(email.from).to eq(['kochiku@example.com'])\n\n      expect(email.html_part.body).to include(\"test-builder\")\n      expect(email.text_part.body).to include(\"test-builder\")\n      expect(email.html_part.body).to include(\"http://\")\n      expect(email.text_part.body).to include(\"http://\")\n      expect(email.html_part.body).to include(\"error text\")\n      expect(email.text_part.body).to include(\"error text\")\n    end\n  end\n\n  describe \"#build_break_email\" do\n    let(:repository) { FactoryBot.create(:repository) }\n    let(:branch) { FactoryBot.create(:branch, repository: repository, name: 'funyuns') }\n    let(:build) { FactoryBot.create(:build, branch_record: branch) }\n\n    before do\n      partitioner = instance_double('Partitioner::Base')\n      allow(partitioner).to receive(:emails_for_commits_causing_failures).and_return({})\n      allow(Partitioner).to receive(:for_build).and_return(partitioner)\n\n      build_part = build.build_parts.create!(:paths => [\"a\", \"b\"], :kind => \"cucumber\", :queue => 'ci')\n      @build_attempt = build_part.build_attempts.create!(:state => 'failed', :builder => \"test-builder\")\n      FactoryBot.create(:stdout_build_artifact, build_attempt: @build_attempt)\n    end\n\n    context \"on a convergence branch\" do\n      before do\n        branch.update_attribute(:convergence, true)\n\n        
allow(GitBlame).to receive(:changes_since_last_green).and_return([{:hash => \"sha\", :author => \"Joe\", :date => \"some day\", :message => \"always be shipping it\"}])\n        allow(GitBlame).to receive(:emails_since_last_green).and_return([\"foo@example.com\"])\n      end\n\n      it \"sends the email\" do\n        expect(build.branch_record.convergence?).to be(true)\n\n        email = BuildMailer.build_break_email(build)\n\n        expect(email.to).to eq([\"foo@example.com\"])\n        expect(email.html_part.body).to include(build.branch_record.name)\n        expect(email.text_part.body).to include(build.branch_record.name)\n        expect(email.html_part.body).to include(\"http://\")\n        expect(email.text_part.body).to include(\"http://\")\n      end\n    end\n\n    context \"on a non-convergence branch\" do\n      before do\n        expect(build.branch_record.convergence?).to be(false)\n\n        allow(GitBlame).to receive(:changes_in_branch).and_return([{:hash => \"sha\", :author => \"Joe\", :date => \"some day\", :message => \"always be shipping it\"}])\n        allow(GitBlame).to receive(:emails_in_branch).and_return([\"foo@example.com\"])\n      end\n\n      it \"sends the email\" do\n        email = BuildMailer.build_break_email(build)\n\n        expect(email.to).to eq([\"foo@example.com\"])\n        expect(email.html_part.body).to include(build.branch_record.name)\n        expect(email.text_part.body).to include(build.branch_record.name)\n        expect(email.html_part.body).to include(\"http://\")\n        expect(email.text_part.body).to include(\"http://\")\n      end\n    end\n\n    context \"with emails from a partitioner\" do\n      before do\n        partitioner = instance_double('Partitioner::Base')\n        allow(partitioner).to receive(:emails_for_commits_causing_failures).and_return({'foo@example.com' => ['sha']})\n        allow(Partitioner).to receive(:for_build).and_return(partitioner)\n        allow(GitBlame).to 
receive(:changes_since_last_green).and_return([{:hash => \"sha\", :author => \"Foo\", :date => \"some day\", :message => \"does this work? LOL\"}])\n      end\n\n      it \"uses those emails\" do\n        email = BuildMailer.build_break_email(build)\n\n        expect(email.to).to eq([\"foo@example.com\"])\n        expect(email.html_part.body).to include(build.branch_record.name)\n        expect(email.text_part.body).to include(build.branch_record.name)\n        expect(email.html_part.body).to include(\"http://\")\n        expect(email.text_part.body).to include(\"http://\")\n        expect(email.html_part.body).to_not include(\"pull-requests/\")\n      end\n\n      context \"when the build is tied to an open pull request on Stash\" do\n        before do\n          allow(build.repository.remote_server).to receive(:class).and_return(RemoteServer::Stash)\n          allow(build.repository.remote_server).to receive(:get_pr_id_and_version).and_return(3, 4)\n        end\n\n        it \"includes link to PR\" do\n          build_part = build.build_parts.create!(:paths => [\"a\", \"b\"], :kind => \"cucumber\", :queue => 'ci')\n          build_part.build_attempts.create!(:state => 'passed', :builder => \"test-builder\")\n\n          email = BuildMailer.build_break_email(build)\n          expect(email.html_part.body).to include(\"pull-requests/3/overview\")\n        end\n      end\n    end\n\n    describe 'failed build part information' do\n      context 'stdout log file has been uploaded' do\n        it 'should link to the log file' do\n          stdout_artifact = @build_attempt.build_artifacts.stdout_log.first\n          email = BuildMailer.build_break_email(build)\n          expect(email.html_part.body).to include(build_artifact_url(stdout_artifact))\n        end\n      end\n\n      context 'stdout log file has not been uploaded yet' do\n        before do\n          @build_attempt.build_artifacts.delete_all\n        end\n\n        it 'should not link to the log file' do\n   
       email = BuildMailer.build_break_email(build)\n          expect(email.html_part.body).to_not include(\"/build_artifacts/\")\n        end\n      end\n    end\n  end\n\n  describe '#build_success_email' do\n    let(:repository) { FactoryBot.create(:repository) }\n    let(:branch) { FactoryBot.create(:branch, repository: repository, name: 'funyuns') }\n    let(:build) { FactoryBot.create(:build, branch_record: branch) }\n\n    before do\n      allow(GitBlame).to receive(:changes_in_branch).and_return([{hash: \"sha\", author: \"Joe\", date: \"some day\", message: \"always be shipping it\"}])\n      allow(GitBlame).to receive(:last_email_in_branch).and_return([\"foo@example.com\"])\n\n      build_part = build.build_parts.create!(paths: [\"a\", \"b\"], kind: \"cucumber\", queue: 'ci')\n      build_part.build_attempts.create!(state: 'passed', builder: \"test-builder\")\n    end\n\n    it \"sends an email\" do\n      email = BuildMailer.build_success_email(build)\n\n      expect(email.to).to eq([\"foo@example.com\"])\n\n      expect(email.html_part.body).to include(repository.name)\n      expect(email.text_part.body).to include(repository.name)\n      expect(email.html_part.body).to include(\"http://\")\n      expect(email.text_part.body).to include(\"http://\")\n    end\n\n    context \"stash repository\" do\n      let(:repository) { FactoryBot.create(:stash_repository) }\n      let(:branch) { FactoryBot.create(:branch, repository: repository, name: 'funyuns') }\n      let(:build) { FactoryBot.create(:build, branch_record: branch) }\n\n      context \"build has an open pull request\" do\n        before do\n          allow(build.repository.remote_server).to receive(:class).and_return(RemoteServer::Stash)\n          allow(build.repository.remote_server).to receive(:get_pr_id_and_version).and_return(3, 4)\n        end\n\n        it \"includes link to PR\" do\n          email = BuildMailer.build_success_email(build)\n          expect(email.html_part.body).to 
include(\"pull-requests/3/overview\")\n        end\n      end\n\n      context \"build does not have a pull request\" do\n        before do\n          allow(build.repository.remote_server).to receive(:class).and_return(RemoteServer::Stash)\n          allow(build.repository.remote_server).to receive(:get_pr_id_and_version).and_raise(RemoteServer::StashAPIError)\n        end\n\n        it \"does not link to a pull request\" do\n          email = BuildMailer.build_success_email(build)\n          expect(email.html_part.body).to_not include(\"pull-requests/\")\n        end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/mailers/merge_mailer_spec.rb",
    "content": "require \"spec_helper\"\n\ndescribe MergeMailer do\n  describe \"merge_successful\" do\n    it \"sends the email\" do\n      email = MergeMailer.merge_successful(FactoryBot.create(:build), to_40('w'), [\"foo@example.com\"], 'deploy log')\n      expect(email.to).to include(\"foo@example.com\")\n    end\n  end\n\n  describe \"merge_failed\" do\n    it \"sends the email\" do\n      email = MergeMailer.merge_failed(FactoryBot.create(:build), [\"foo@example.com\"], 'deploy log')\n      expect(email.to).to include(\"foo@example.com\")\n    end\n  end\nend\n"
  },
  {
    "path": "spec/mailers/previews/build_mailer_preview.rb",
    "content": "require 'git_blame'\nrequire 'partitioner'\n\n# Stub GitBlame behavior for mailer preview\nclass GitBlame\n  class << self\n    def emails_since_last_green(build)\n      ['test@example.com']\n    end\n\n    def emails_in_branch(build)\n      ['test@example.com']\n    end\n\n    def last_email_in_branch(build)\n      ['test@example.com']\n    end\n\n    def changes_since_last_green(build)\n      [{:hash => \"fed2d1188b2005eea51b3d87f819b2ebbdbeb67a\",\n        :author => \"Joshua Eversmann <jeversmann@squareup.com>\",\n        :date => \"Wed Jul 2 15:13:31 2014 -0700\",\n        :message =>\n      \"Run success script built back in, Thread checks removed from repo logic\"},\n       {:hash => \"a556abc734b7fc284436270bce5c52a409b58fb8\",\n        :author => \"Joshua Eversmann <jeversmann@squareup.com>\",\n        :date => \"Mon Jun 30 17:22:12 2014 -0700\",\n        :message => \"This is a\\n multi line commit message\\n so ha.\"},\n       {:hash => \"7abb9c3194ed1793e78a2928c793d9f4172afab2\",\n        :author => \"Joshua Eversmann <jeversmann@squareup.com>\",\n        :date => \"Mon Jun 30 17:22:12 2014 -0700\",\n        :message => \"Put file reading logic into GitRepo\"}]\n    end\n\n    def changes_in_branch(build)\n      [{:hash => \"fed2d1188b2005eea51b3d87f819b2ebbdbeb67a\",\n        :author => \"Joshua Eversmann <jeversmann@squareup.com>\",\n        :date => \"Wed Jul 2 15:13:31 2014 -0700\",\n        :message =>\n      \"Run success script built back in, Thread checks removed from repo logic\"},\n       {:hash => \"a556abc734b7fc284436270bce5c52a409b58fb8\",\n        :author => \"Joshua Eversmann <jeversmann@squareup.com>\",\n        :date => \"Mon Jun 30 17:22:12 2014 -0700\",\n        :message => \"This is a\\n multi line commit message\\n so ha.\"},\n       {:hash => \"7abb9c3194ed1793e78a2928c793d9f4172afab2\",\n        :author => \"Joshua Eversmann <jeversmann@squareup.com>\",\n        :date => \"Mon Jun 30 17:22:12 2014 -0700\",\n      
  :message => \"Put file reading logic into GitRepo\"}]\n    end\n  end\nend\n\n# Stub Partitioner behavior for mailer preview\nmodule Partitioner\n  def self.for_build(build)\n    Base.new(build, nil)\n  end\n\n  class Base\n    def initialize(build, kochiku_yml)\n      @build = build\n      @kochiku_yml = kochiku_yml\n    end\n\n    def emails_for_commits_causing_failures\n      {}\n    end\n  end\nend\n\nclass BuildMailerPreview < ActionMailer::Preview\n  def build_break_email\n    BuildMailer.build_break_email(Build.where(:state => 'errored').first)\n  end\n\n  def build_success_email\n    BuildMailer.build_success_email(Build.where(:state => 'succeeded').first)\n  end\nend\n"
  },
  {
    "path": "spec/models/branch_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe Branch, type: :model do\n\n  it 'should fail on nil name' do\n    expect(FactoryBot.build(:branch, name: nil).valid?).to be false\n  end\n\n  it 'should fail on empty name' do\n    expect(FactoryBot.build(:branch, name: \"\").valid?).to be false\n  end\n\n  describe '#abort_in_progress_builds_behind_build' do\n    let(:branch) { FactoryBot.create(:branch) }\n\n    it 'aborts non-finished builds for a branch' do\n      build1 = branch.builds.create(state: 'succeeded', ref: to_40('1'))\n      build2 = branch.builds.create(state: 'running', ref: to_40('2'))\n      build3 = branch.builds.create(state: 'partitioning', ref: to_40('3'))\n      build4 = branch.builds.create(state: 'partitioning', ref: to_40('4'))\n      build5 = branch.builds.create(state: 'partitioning', ref: to_40('5'))\n\n      branch.abort_in_progress_builds_behind_build(build4)\n\n      expect(build1.reload.state).to eq('succeeded')\n      expect(build2.reload.state).to eq('aborted')\n      expect(build3.reload.state).to eq('aborted')\n      expect(build4.reload.state).to eq('partitioning')\n      expect(build5.reload.state).to eq('partitioning')\n    end\n  end\n\n  describe \"#last_completed_build\" do\n    let(:branch) { FactoryBot.create(:branch) }\n    subject { branch.last_completed_build }\n\n    it \"should return the most recent build in a completed state\" do\n      FactoryBot.create(:build, :branch_record => branch, :state => 'running')\n      FactoryBot.create(:build, :branch_record => branch, :state => 'succeeded')\n      expected = FactoryBot.create(:build, :branch_record => branch, :state => 'errored')\n      FactoryBot.create(:build, :branch_record => branch, :state => 'partitioning')\n\n      should == expected\n    end\n  end\n\n  describe '#timing_data_for_recent_builds' do\n    subject { branch.timing_data_for_recent_builds.to_a }\n\n    let(:branch) { FactoryBot.create(:branch) }\n\n    context 'when the branch has never been 
built' do\n      it { should == [] }\n    end\n\n    context 'when the branch has one build' do\n      let!(:build) { FactoryBot.create(:build, :branch_record => branch, :state => 'succeeded') }\n\n      context 'when the build has one part' do\n        let!(:build_part) {\n          FactoryBot.create(:build_part, :build_instance => build, :kind => 'spec')\n        }\n\n        context 'when the part has zero attempts' do\n          it 'still includes the build' do\n            should == [[\n              'spec',\n              build.ref[0, 5],\n              0, 0, 0,\n              build.id,\n              'succeeded',\n              build.created_at.to_s\n            ]]\n          end\n        end\n\n        context 'when the part has an unstarted attempt' do\n          let!(:build_attempt) do\n            FactoryBot.create(\n              :build_attempt,\n              :build_part => build_part,\n              :state => 'runnable'\n            )\n          end\n\n          it 'still includes the build' do\n            build_attempt.finish!('running')\n            should == [[\n              'spec',\n              build.ref[0, 5],\n              0, 0, 0,\n              build.id,\n              'running',\n              build.created_at.to_s\n            ]]\n          end\n        end\n\n        context 'when the part has one attempt' do\n          let!(:build_attempt) do\n            FactoryBot.create(\n              :build_attempt,\n              :build_part => build_part,\n              :started_at => 12.minutes.ago,\n              :finished_at => 7.minutes.ago,\n              :state => 'passed'\n            )\n          end\n\n          it 'shows error bars, ref, and build status' do\n            should == [[\n              'spec',\n              build.ref[0, 5],\n              (build_attempt.elapsed_time / 60).round,\n              0, 0,\n              build.id,\n              'succeeded',\n              build.created_at.to_s\n            ]]\n          end\n  
      end\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/models/build_artifact_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe BuildArtifact do\n  it \"should validate presence of log_file\" do\n    expect(BuildArtifact.new).not_to be_valid\n\n    ba = BuildArtifact.new\n    ba.log_file = File.open(FIXTURE_PATH.join('build_artifact.log'))\n    expect(ba).to be_valid\n  end\n\n  describe \"stdout_log scope\" do\n    let!(:artifact) { FactoryBot.create :build_artifact }\n    let!(:stdout_artifact) { FactoryBot.create :build_artifact, :log_file => File.open(FIXTURE_PATH + 'stdout.log.gz') }\n\n    subject { BuildArtifact.stdout_log }\n\n    it \"should return artifacts that match stdout.log\" do\n      should_not include(artifact)\n      should include(stdout_artifact)\n    end\n  end\nend\n"
  },
  {
    "path": "spec/models/build_attempt_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe BuildAttempt do\n  it \"requires a valid state\" do\n    build_attempt = BuildAttempt.new(:state => \"asasdfsdf\")\n    expect(build_attempt).not_to be_valid\n    expect(build_attempt).to have(1).error_on(:state)\n    build_attempt.state = 'runnable'\n    expect(build_attempt).to be_valid\n  end\n\n  describe \"finish!\" do\n    let(:build) { FactoryBot.create(:build, :state => 'runnable', :merge_on_success => true) }\n    let(:build_part) { FactoryBot.create(:build_part, :build_instance => build, retry_count: 2) }\n    let!(:build_attempt) { FactoryBot.create(:build_attempt, :state => 'running', :build_part => build_part) }\n\n    context \"build auto-retries\" do\n      it \"requests a rebuild if should_reattempt? is true\" do\n        allow(build_part).to receive(:should_reattempt?).and_return(true)\n        expect(build_part).to receive(:rebuild!)\n        build_attempt.finish!('failed')\n      end\n\n      it \"does not request a rebuild if should_reattempt? is false\" do\n        allow(build_part).to receive(:should_reattempt?).and_return(false)\n        expect(build_part).to_not receive(:rebuild!)\n        build_attempt.finish!('failed')\n      end\n    end\n\n    it \"calls update_state_from_parts!\" do\n      expect(build).to receive(:update_state_from_parts!).at_least(:once)\n      build_attempt.finish!('passed')\n    end\n\n    it \"sends an email for an errored build\" do\n      expect(BuildMailer).to receive(:error_email).and_return(OpenStruct.new(:deliver => nil))\n      allow(build_attempt).to receive(:should_reattempt?).and_return(false)\n      build_attempt.finish!('errored')\n    end\n  end\nend\n"
  },
  {
    "path": "spec/models/build_part_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe BuildPart do\n  let(:repository) { FactoryBot.create(:repository) }\n  let(:branch) { FactoryBot.create(:branch, :repository => repository) }\n  let(:build) { FactoryBot.create(:build, branch_record: branch, state: 'runnable', created_at: 5.minutes.ago, updated_at: 5.minutes.ago) }\n  let(:build_part) { FactoryBot.create(:build_part, :paths => [\"a\", \"b\"], :kind => \"spec\", :build_instance => build, :queue => 'ci') }\n\n  before do\n    allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil)\n  end\n\n  describe \"#create_and_enqueue_new_build_attempt!\" do\n    it \"should create a new build attempt\" do\n      expect {\n        build_part.create_and_enqueue_new_build_attempt!\n      }.to change(build_part.build_attempts, :count).by(1)\n    end\n\n    it \"enqueues onto the queue specified in the build part\" do\n      build_part.update_attribute(:queue, 'queueX')\n      expect(BuildAttemptJob).to receive(:enqueue_on).once do |queue, arg_hash|\n        expect(queue).to eq(\"queueX\")\n      end\n      build_part.create_and_enqueue_new_build_attempt!\n    end\n\n    it \"bumps updated_at value on the build record\" do\n      # if build.updated_at is not changed then the view caches will be stale\n      original_updated_at = build.updated_at.to_i\n      build_part.create_and_enqueue_new_build_attempt!\n      expect(build.reload.updated_at.to_i).to_not eq(original_updated_at)\n    end\n\n    it \"should enqueue the build attempt for building\" do\n      build_part.update_attributes!(:options => {\"ruby\" => \"ree\"})\n      expect(BuildAttemptJob).to receive(:enqueue_on).once do |queue, arg_hash|\n        expect(queue).to eq(\"ci\")\n        expect(arg_hash[\"build_attempt_id\"]).not_to be_blank\n        expect(arg_hash[\"build_ref\"]).not_to be_blank\n        expect(arg_hash[\"build_kind\"]).not_to be_blank\n        expect(arg_hash[\"test_files\"]).not_to be_blank\n        
expect(arg_hash[\"repo_name\"]).not_to be_blank\n        expect(arg_hash[\"test_command\"]).not_to be_blank\n        expect(arg_hash[\"repo_url\"]).not_to be_blank\n        expect(arg_hash[\"options\"]).to eq({\"ruby\" => \"ree\"})\n        expect(arg_hash[\"kochiku_env\"]).to eq(\"test\")\n      end\n      build_part.create_and_enqueue_new_build_attempt!\n    end\n  end\n\n  describe \"#job_args\" do\n    let(:repository) { FactoryBot.create(:repository, :url => \"git@github.com:org/test-repo.git\") }\n\n    context \"with a git mirror specified\" do\n      before do\n        settings = SettingsAccessor.new(<<-YAML)\n        git_servers:\n          github.com:\n            type: github\n            mirror: \"git://git-mirror.example.com/\"\n        YAML\n        stub_const \"Settings\", settings\n      end\n\n      it \"should substitute the mirror\" do\n        build_attempt = build_part.build_attempts.create!(:state => 'runnable')\n        args = build_part.job_args(build_attempt)\n        expect(args[\"repo_url\"]).to eq(\"git://git-mirror.example.com/org/test-repo.git\")\n      end\n    end\n\n    context \"with no git mirror specified\" do\n      before do\n        settings = SettingsAccessor.new(<<-YAML)\n        git_servers:\n          github.com:\n            type: github\n        YAML\n        stub_const \"Settings\", settings\n      end\n\n      it \"should return the original git url\" do\n        build_attempt = build_part.build_attempts.create!(:state => 'runnable')\n        args = build_part.job_args(build_attempt)\n        expect(args[\"repo_url\"]).to eq(repository.url)\n      end\n    end\n  end\n\n  describe \"#unsuccessful?\" do\n    subject { build_part.unsuccessful? 
}\n\n    context \"with all successful attempts\" do\n      before do\n        2.times { FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed') }\n      end\n\n      it { should be false }\n    end\n\n    context \"with one successful attempt\" do\n      before {\n        2.times { FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'failed') }\n        FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed')\n      }\n\n      it { should be false }\n    end\n\n    context \"with all unsuccessful attempts\" do\n      before do\n        2.times { FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'failed') }\n      end\n\n      it { should be true }\n    end\n  end\n\n  describe \"#status\" do\n    subject { build_part.status }\n\n    context \"with all successful attempts\" do\n      before do\n        2.times { FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed') }\n      end\n\n      it { should == 'passed' }\n    end\n\n    context \"with one successful attempt\" do\n      before do\n        FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'failed')\n        FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed')\n        FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'failed')\n      end\n\n      it { should == 'passed' }\n    end\n\n    context \"with no successful attempts\" do\n      before do\n        FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'failed')\n        FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'running')\n      end\n\n      it { should == 'running' }\n    end\n  end\n\n  describe \"#not_finished?\" do\n    subject { build_part.not_finished? 
}\n    context \"when not finished\" do\n      it { should be true }\n    end\n    context \"when finished\" do\n      before { FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed', :finished_at => Time.current) }\n      it { should be false }\n    end\n  end\n\n  describe \"#should_reattempt?\" do\n    let(:build_part) { FactoryBot.create(:build_part, retry_count: 1, build_instance: build) }\n\n    it \"should reattempt\" do\n      expect(build_part.should_reattempt?).to be true\n    end\n\n    context \"when we have already hit the retry count\" do\n      before do\n        FactoryBot.create(:build_attempt, build_part: build_part, state: 'failed')\n        FactoryBot.create(:build_attempt, build_part: build_part, state: 'failed')\n      end\n\n      it \"will not reattempt\" do\n        expect(build_part.should_reattempt?).to be false\n      end\n    end\n\n    context \"when we are just one away from the retry count\" do\n      before do\n        FactoryBot.create(:build_attempt, build_part: build_part, state: 'failed')\n      end\n\n      it \"will reattempt\" do\n        expect(build_part.should_reattempt?).to be true\n      end\n    end\n\n    context \"when it fails very fast\" do\n      let(:build_part) { FactoryBot.create(:build_part, retry_count: 0, build_instance: build) }\n\n      before do\n        FactoryBot.create(:build_attempt, build_part: build_part, state: 'errored', started_at: 10.seconds.ago, finished_at: Time.current)\n      end\n\n      it \"will reattempt\" do\n        expect(build_part.should_reattempt?).to be true\n      end\n    end\n\n    context \"after 5 failures\" do\n      let(:build_part) { FactoryBot.create(:build_part, retry_count: 0, build_instance: build) }\n\n      before do\n        5.times do\n          FactoryBot.create(:build_attempt, build_part: build_part, state: 'errored', started_at: 10.seconds.ago, finished_at: Time.current)\n        end\n      end\n\n      it \"will not reattempt\" do\n      
  expect(build_part.should_reattempt?).to be false\n      end\n    end\n\n    context \"when it fails after a longer time\" do\n      let(:build_part) { FactoryBot.create(:build_part, retry_count: 0, build_instance: build) }\n\n      before do\n        FactoryBot.create(:build_attempt, build_part: build_part, state: 'errored', started_at: 70.seconds.ago, finished_at: Time.current)\n      end\n\n      it \"shouldn't reattempt\" do\n        expect(build_part.should_reattempt?).to be false\n      end\n    end\n\n    context \"when there has already been a successful attempt\" do\n      before do\n        FactoryBot.create(:build_attempt, build_part: build_part, state: 'passed')\n        FactoryBot.create(:build_attempt, build_part: build_part, state: 'failed')\n      end\n\n      it \"will not reattempt\" do\n        expect(build_part.should_reattempt?).to be false\n      end\n    end\n  end\n\n  describe \"#as_json\" do\n    subject(:json) { build_part.as_json['build_part'].with_indifferent_access }\n    context \"with a build attempt\" do\n      before do\n        FactoryBot.create(:build_attempt, :build_part => build_part, :state => 'passed')\n      end\n\n      it \"includes synthetic attributes like status\" do\n        expect(json[:status]).to eq('passed')\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/models/build_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe Build do\n  let(:branch) { FactoryBot.create(:branch) }\n  let(:build) { FactoryBot.create(:build, branch_record: branch) }\n  let(:parts) { [{'type' => 'cucumber', 'files' => ['a', 'b'], 'queue' => 'ci'}, {'type' => 'rspec', 'files' => ['c', 'd'], 'queue' => 'ci'}] }\n\n  before do\n    allow(GitRepo).to receive(:load_kochiku_yml).and_return(nil)\n  end\n\n  describe \"validations\" do\n    it \"requires a ref to be set\" do\n      build.ref = nil\n      expect(build).not_to be_valid\n      expect(build).to have(1).error_on(:ref)\n    end\n\n    it \"requires a branch_id to be set\" do\n      build.branch_id = nil\n      expect(build).not_to be_valid\n      expect(build).to have(1).error_on(:branch_id)\n    end\n\n    it \"should force uniqueness on ref\" do\n      build2 = FactoryBot.build(:build, branch_record: branch, ref: build.ref)\n      expect(build2).not_to be_valid\n      expect(build2).to have(1).error_on(:ref)\n    end\n  end\n\n  describe '#kochiku_yml' do\n    it 'only tries to load once if it fails' do\n      expect(GitRepo).to receive(:load_kochiku_yml).once\n      5.times do\n        build.kochiku_yml\n      end\n    end\n  end\n\n  describe \"#partition\" do\n    it \"should create a BuildPart for each path\" do\n      build.partition(parts)\n      expect(build.build_parts.map(&:kind)).to match_array(['cucumber', 'rspec'])\n      expect(build.build_parts.map(&:queue)).to match_array(['ci', 'ci'])\n      expect(build.build_parts.find_by_kind('cucumber').paths).to match_array(['a', 'b'])\n    end\n\n    it \"should change state to runnable\" do\n      expect {\n        build.partition(parts)\n      }.to change(build, :state).from('partitioning').to('runnable')\n    end\n\n    it \"creates parts with options\" do\n      build.partition([{\"type\" => \"cucumber\", \"files\" => ['a'], 'queue' => 'developer', 'options' => {\"ruby\" => \"ree\"}}])\n      build_part = build.build_parts.first\n      
build_part.reload\n      expect(build_part.options).to eq({\"ruby\" => \"ree\"})\n    end\n\n    it \"should set the queue\" do\n      build.partition([{\"type\" => \"cucumber\", \"files\" => ['a'], 'queue' => 'developer'}])\n      build_part = build.build_parts.first\n      expect(build_part.queue).to eq('developer')\n    end\n\n    it \"should set the retry_count\" do\n      build.partition([{\"type\" => \"cucumber\", \"files\" => ['a'], 'queue' => 'developer', 'retry_count' => 3}])\n      build_part = build.build_parts.first\n      expect(build_part.retry_count).to eq(3)\n    end\n\n    it \"should create build attempts for each build part\" do\n      build.partition(parts)\n      build.build_parts.all? { |bp| expect(bp.build_attempts).to have(1).item }\n    end\n\n    it \"should enqueue build part jobs if repository is enabled\" do\n      expect(BuildAttemptJob).to receive(:enqueue_on).twice\n      build.partition(parts)\n    end\n\n    it \"should not enqueue build part jobs if repository is disabled\" do\n      build2 = FactoryBot.create(:build_on_disabled_repo)\n      build2.partition(parts)\n      expect(BuildAttemptJob).to receive(:enqueue_on).exactly(0).times\n      expect(build2.build_parts.reload).to be_empty\n    end\n\n    it \"rolls back any changes to the database if an error occurs\" do\n      # set parts to an illegal value\n      parts = [{'type' => 'rspec', 'files' => [], 'queue' => 'ci'}]\n\n      expect(build.build_parts).to be_empty\n      expect(build.state).to eq('partitioning')\n\n      expect { build.partition(parts) }.to raise_error(ActiveRecord::ActiveRecordError)\n\n      expect(build.build_parts.reload).to be_empty\n      expect(build.state).to eq('runnable')\n    end\n  end\n\n  describe \"#completed?\" do\n    Build::TERMINAL_STATES.each do |state|\n      it \"should be true for #{state}\" do\n        build.state = state\n        expect(build).to be_completed\n      end\n    end\n\n    (Build::STATES - Build::TERMINAL_STATES).each 
do |state|\n      it \"should be false for #{state}\" do\n        build.state = state\n        expect(build).not_to be_completed\n      end\n    end\n  end\n\n  describe \"#update_state_from_parts!\" do\n    let(:build) { FactoryBot.create(:build, branch_record: branch, :state => 'running') }\n    let!(:build_part_1) { FactoryBot.create(:build_part, :build_instance => build) }\n    let!(:build_part_2) { FactoryBot.create(:build_part, :build_instance => build) }\n\n    it \"should set a build state to running if it is successful so far, but still incomplete\" do\n      FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed')\n      FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running')\n      build.update_state_from_parts!\n\n      expect(build.state).to eq('running')\n    end\n\n    it \"should set build state to errored if any of its parts errored\" do\n      FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'errored')\n      FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'passed')\n      build.update_state_from_parts!\n\n      expect(build.state).to eq('errored')\n    end\n\n    it \"should set build state to succeeded if all of its parts passed\" do\n      FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed')\n      FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'passed')\n      build.update_state_from_parts!\n\n      expect(build.state).to eq('succeeded')\n    end\n\n    it \"should set a build state to doomed if it has a failed part but still has more parts to process\" do\n      FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'failed')\n      build.update_state_from_parts!\n      expect(build.state).to eq('doomed')\n    end\n\n    it \"should change a doomed build to failed once it is complete\" do\n      ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'failed')\n      ba2 = 
FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running')\n      build.update_state_from_parts!\n      expect(build.state).to eq('doomed')\n\n      ba2.update!(state: 'passed')\n      build.update_state_from_parts!\n      expect(build.state).to eq('failed')\n    end\n\n    it \"should set build_state to running when a failed attempt is retried\" do\n      ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed')\n      ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'failed')\n      ba2_2 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running')\n      build.update_state_from_parts!\n\n      expect(build.state).to eq('running')\n    end\n\n    it \"should set build_state to doomed when an attempt is retried but other attempts are failed\" do\n      ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'failed')\n      ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'failed')\n      ba2_2 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running')\n      build.update_state_from_parts!\n\n      expect(build.state).to eq('doomed')\n    end\n\n    it \"should ignore the old build_attempts\" do\n      ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed')\n      ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'errored')\n      ba2_2 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'passed')\n      build.update_state_from_parts!\n\n      expect(build.state).to eq('succeeded')\n    end\n\n    it \"should not ignore old build_attempts that passed\" do\n      ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed')\n      ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'passed')\n      ba2_2 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'errored')\n      
build.update_state_from_parts!\n\n      expect(build.state).to eq('succeeded')\n    end\n\n    context \"when the build is aborted\" do\n      let(:build) { FactoryBot.create(:build, branch_record: branch, state: 'aborted') }\n\n      it \"should set state to succeeded if a build is aborted, but all of its parts passed\" do\n        # scenario is applicable if a build is aborted only after its build parts are already running\n        FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed')\n        FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'passed')\n        build.update_state_from_parts!\n\n        expect(build.state).to eq('succeeded')\n      end\n\n      it \"should remain aborted when build attempts finish as errored or failed\" do\n        FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed')\n        ba = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'errored')\n        build.update_state_from_parts!\n        expect(build.state).to eq('aborted')\n\n        ba.update_attributes!(state: 'failed')\n        build.update_state_from_parts!\n        expect(build.state).to eq('aborted')\n      end\n    end\n  end\n\n  describe \"#elapsed_time\" do\n    it \"returns the difference between the build creation time and the last finished time\" do\n      build.partition(parts)\n      expect(build.elapsed_time).to be_nil\n      last_attempt = BuildAttempt.find(build.build_attempts.last.id)\n      last_attempt.update_attributes(:finished_at => build.created_at + 10.minutes)\n      expect(build.elapsed_time).to be_within(1.second).of(10.minutes)\n    end\n  end\n\n  describe \"#abort!\" do\n    let(:build) { FactoryBot.create(:build, :state => 'runnable', :merge_on_success => true) }\n\n    it \"should mark the build as aborted\" do\n      expect{ build.abort! 
}.to change(build, :state).from('runnable').to('aborted')\n    end\n\n    it \"should strip a true merge_on_success setting\" do\n      expect{ build.abort! }.to change(build, :merge_on_success).to(false)\n    end\n\n    it \"should mark all of the build's unstarted build_attempts as aborted\" do\n      build_part1 = FactoryBot.create(:build_part, :build_instance => build)\n      build_part2 = FactoryBot.create(:build_part, :build_instance => build)\n      build_attempt_started = FactoryBot.create(:build_attempt, :build_part => build_part1, :state => 'running')\n      build_attempt_unstarted = FactoryBot.create(:build_attempt, :build_part => build_part2, :state => 'runnable')\n      build.abort!\n\n      expect(build_attempt_started.reload.state).to eq('running')\n      expect(build_attempt_unstarted.reload.state).to eq('aborted')\n    end\n  end\n\n  describe '#to_png' do\n    let(:build)     { FactoryBot.create(:build, :state => state) }\n    let(:png)       { build.to_png }\n    let(:png_color) { png.get_pixel(png.width / 2, png.height / 2) }\n\n    let(:red)   { 4151209727 }\n    let(:green) { 3019337471 }\n    let(:blue)  { 1856370687 }\n\n    context 'with succeeded state' do\n      let(:state) { 'succeeded' }\n\n      it 'returns a green status png' do\n        expect(png_color).to eq(green)\n      end\n    end\n\n    %w(failed errored aborted doomed).each do |current_state|\n      context \"with #{current_state} state\" do\n        let(:state) { current_state }\n\n        it 'returns a red status png' do\n          expect(png_color).to eq(red)\n        end\n      end\n    end\n\n    %w(partitioning runnable running).each do |current_state|\n      context \"with #{current_state} state\" do\n        let(:state) { current_state }\n\n        it 'returns a blue status png' do\n          expect(png_color).to eq(blue)\n        end\n      end\n    end\n  end\n\n  describe \"#previous_successful_build\" do\n    let(:successful_build) {\n      
build.partition(parts)\n      build.build_parts.each { |part| part.last_attempt.finish!('passed') }\n      build.update_state_from_parts!\n      build.update_attribute(:updated_at, 1.minute.ago)\n      build\n    }\n\n    it \"returns nil when there are no previous successful builds for the branch\" do\n      expect(build.succeeded?).to be false\n      build2 = FactoryBot.create(:build, branch_record: branch)\n\n      expect(build.previous_successful_build).to be_nil\n      expect(build2.previous_successful_build).to be_nil\n    end\n\n    it \"returns the most recent build in state == 'succeeded' prior to this build\" do\n      stub_request(:post, /https:\\/\\/git\\.squareup\\.com\\/api\\/v3\\/repos\\/square\\/kochiku\\/statuses\\//)\n      expect(successful_build.succeeded?).to be true\n      build2 = FactoryBot.create(:build, branch_record: branch)\n      expect(build2.previous_successful_build).to eq(successful_build)\n    end\n  end\n\n  describe \"#mergable_by_kochiku??\" do\n    let(:build) { FactoryBot.create(:build, branch_record: branch) }\n\n    before do\n      expect(build.branch_record).to_not be_convergence\n      expect(build.repository.allows_kochiku_merges).to be true\n    end\n\n    context \"when merge_on_success_enabled? 
is true\" do\n      before do\n        build.update_attributes!(merge_on_success: true)\n        expect(build.merge_on_success_enabled?).to be true\n      end\n\n      it \"is true if it is a passed build\" do\n        build.state = 'succeeded'\n        expect(build.mergable_by_kochiku?).to be true\n      end\n\n      it \"is false if it is a failed build\" do\n        (Build::TERMINAL_STATES - ['succeeded']).each do |failed_state|\n          build.state = failed_state\n          expect(build.mergable_by_kochiku?).to be false\n        end\n      end\n    end\n\n    context \"with merge_on_success disabled\" do\n      it \"should never be true\" do\n        build.merge_on_success = false\n        build.state = 'succeeded'\n\n        expect(build.mergable_by_kochiku?).to be false\n      end\n    end\n\n    context \"when allows_kochiku_merges has been disabled on the repository\" do\n      before do\n        build.repository.update_attributes(:allows_kochiku_merges => false)\n      end\n\n      it \"should never be true\" do\n        build.merge_on_success = true\n        build.state = 'succeeded'\n\n        expect(build.mergable_by_kochiku?).to be false\n      end\n    end\n\n    context 'there is a newer build for the same branch' do\n      let(:build) {\n        FactoryBot.create(:build, branch_record: branch,\n                                  state: 'succeeded', merge_on_success: true)\n      }\n\n      before do\n        expect(build.mergable_by_kochiku?).to be true\n      end\n\n      it 'should no longer be mergable' do\n        expect(build).to receive(:newer_branch_build_exists?).and_return(true)\n        expect(build.mergable_by_kochiku?).to be false\n      end\n    end\n  end\n\n  describe \"#merge_on_success_enabled?\" do\n    it \"is true if it is a developer build with merge_on_success enabled\" do\n      build.merge_on_success = true\n      expect(build.merge_on_success_enabled?).to be true\n    end\n\n    it \"is false if it is a developer build with 
merge_on_success disabled\" do\n      build.merge_on_success = false\n      expect(build.merge_on_success_enabled?).to be false\n    end\n\n    context \"for a build on a convergence branch\" do\n      let(:build) { FactoryBot.create(:convergence_branch_build) }\n\n      it \"should be false\" do\n        build.merge_on_success = true\n        expect(build.merge_on_success_enabled?).to be false\n      end\n    end\n  end\n\n  describe \"#newer_branch_build_exists?\" do\n    before do\n      @build1 = FactoryBot.create(:build, branch_record: branch)\n      @build2 = FactoryBot.create(:build, branch_record: branch)\n    end\n\n    it \"should be true for the earlier build\" do\n      expect(@build1.newer_branch_build_exists?).to be true\n    end\n\n    it \"should be false for the later build\" do\n      expect(@build2.newer_branch_build_exists?).to be false\n    end\n  end\n\n  describe \"#already_failed?\" do\n    let!(:build_part_1) { FactoryBot.create(:build_part, :build_instance => build, :retry_count => 3) }\n    it \"returns false when there exists successful build attempt\" do\n      ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'failed')\n      ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed')\n      expect(build.already_failed?).to eq(false)\n    end\n\n    it \"returns true when there exists no successful build attempt\" do\n      ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'failed')\n      ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'running')\n      expect(build.already_failed?).to eq(true)\n    end\n  end\n\n  describe \"#send_build_status_email!\" do\n    let(:repository) { FactoryBot.create(:repository) }\n    let(:branch) { FactoryBot.create(:branch, repository: repository) }\n    let(:build) { FactoryBot.create(:build, state: 'runnable', branch_record: branch) }\n    let(:build_attempt) { 
build.build_parts.first.build_attempts.create!(state: 'failed') }\n\n    it \"should not send a failure email if the branch has never had a successful build\" do\n      expect(BuildMailer).not_to receive(:build_break_email)\n      build.send_build_status_email!\n    end\n\n    context \"for a branch that has had a successful build\" do\n      let(:build) {\n        FactoryBot.create(:build, state: 'succeeded', branch_record: branch)\n        FactoryBot.create(:build, state: 'runnable', branch_record: branch)\n      }\n\n      it \"should not send the email if the build is not completed\" do\n        expect(BuildMailer).not_to receive(:build_break_email)\n        build.send_build_status_email!\n      end\n\n      it \"should not send the failure email if the build passed\" do\n        build.update_attribute(:state, 'succeeded')\n        expect(BuildMailer).not_to receive(:build_break_email)\n        expect(BuildMailer).to receive(:build_success_email).and_return(OpenStruct.new(:deliver => nil))\n        build.send_build_status_email!\n      end\n\n      it \"should only send the build failure email once\" do\n        build.update_attribute(:state, 'failed')\n        expect(BuildMailer).to receive(:build_break_email).once.and_return(OpenStruct.new(:deliver => nil))\n        build.send_build_status_email!\n        build.send_build_status_email!\n      end\n\n      it \"should send a fail email when the build is finished\" do\n        build.update_attribute(:state, 'failed')\n        expect(BuildMailer).to receive(:build_break_email).and_return(OpenStruct.new(:deliver => nil))\n        build.send_build_status_email!\n      end\n\n      it \"does not send a email if the repository setting is disabled\" do\n        build.update_attribute(:state, 'failed')\n        repository.update_attributes!(:send_build_failure_email => false)\n        build.reload\n        expect(BuildMailer).not_to receive(:build_break_email)\n        build.send_build_status_email!\n      end\n\n     
 context \"when email_on_first_failure is false\" do\n        before do\n          repository.update_attribute(:email_on_first_failure, false)\n        end\n        it \"should not send email on first build part failure\" do\n          build.update_attribute(:state, 'doomed')\n          expect(BuildMailer).to_not receive(:build_break_email)\n          build.send_build_status_email!\n        end\n\n        context \"retries enabled\" do\n          let!(:build_part_1) { FactoryBot.create(:build_part, :build_instance => build, :retry_count => 3) }\n          let!(:build_part_2) { FactoryBot.create(:build_part, :build_instance => build, :retry_count => 3) }\n\n          it \"should not send email before retry\" do\n            ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'running')\n            ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running')\n\n            expect(BuildMailer).to_not receive(:build_break_email)\n\n            ba2_1.finish!('failed')\n          end\n        end\n      end\n\n      context \"when email_on_first_failure is true\" do\n        before do\n          repository.update_attribute(:email_on_first_failure, true)\n        end\n\n        context \"on a convergence branch build\" do\n          let(:branch) { FactoryBot.create(:convergence_branch, repository: repository) }\n          let!(:build_part_1) { FactoryBot.create(:build_part, :build_instance => build, :retry_count => 3) }\n          let!(:build_part_2) { FactoryBot.create(:build_part, :build_instance => build, :retry_count => 3) }\n\n          it \"should not send email prior to retry\" do\n            ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed')\n            ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running')\n\n            expect(BuildMailer).to_not receive(:build_break_email)\n\n            ba2_1.finish!('failed')\n          end\n        end\n\n        
context \"branch build\" do\n          let(:branch) { FactoryBot.create(:branch, repository: repository) }\n          let(:branch_build) { FactoryBot.create(:build, :state => 'runnable', :branch_record => branch) }\n          let!(:build_part_1) { FactoryBot.create(:build_part, :build_instance => branch_build, :retry_count => 3) }\n          let!(:build_part_2) { FactoryBot.create(:build_part, :build_instance => branch_build, :retry_count => 3) }\n\n          it \"should send email prior to retry\" do\n            ba1 = FactoryBot.create(:build_attempt, build_part: build_part_1, state: 'passed')\n            ba2_1 = FactoryBot.create(:build_attempt, build_part: build_part_2, state: 'running')\n\n            expect(BuildMailer).to receive(:build_break_email).once.and_return(OpenStruct.new(:deliver => nil))\n\n            ba2_1.finish!('failed')\n          end\n        end\n      end\n\n      context \"for a build not on a convergence branch\" do\n        before do\n          expect(branch).to_not be_convergence\n        end\n\n        it \"should not send a failure email\" do\n          expect(BuildMailer).not_to receive(:build_break_email)\n          build.send_build_status_email!\n        end\n\n        it \"should send a success email\" do\n          build.update_attribute(:state, 'succeeded')\n          expect(BuildMailer).to receive(:build_success_email).and_return(OpenStruct.new(:deliver => nil))\n          build.send_build_status_email!\n        end\n      end\n    end\n  end\n\n  describe '#as_json' do\n    it 'returns a hash with elapsed_time' do\n      build.partition(parts)\n      hash = build.as_json\n      expect(hash['build'].key?('elapsed_time')).to eq(true)\n      expect(hash['build']['elapsed_time']).to eq(build.elapsed_time)\n      last_attempt = BuildAttempt.find(build.build_attempts.last.id)\n      last_attempt.update_attributes(:finished_at => build.created_at + 10.minutes)\n      hash = build.as_json\n      
expect(hash['build'].key?('elapsed_time')).to eq(true)\n      expect(hash['build']['elapsed_time']).to eq(build.elapsed_time)\n    end\n\n    it 'returns a hash with out test_command' do\n      build.partition(parts)\n      hash = build.as_json\n      expect(hash['build'].key?('test_command')).to eq(false)\n    end\n\n    it 'returns elapsed_time even when other options are used' do\n      build.partition(parts)\n      hash = build.as_json(include: :build_parts)\n      expect(hash['build'].key?('elapsed_time')).to eq(true)\n    end\n\n    it 'allows overriding :methods option' do\n      build.partition(parts)\n      hash = build.as_json(methods: :idle_time)\n      expect(hash['build'].key?('elapsed_time')).to eq(false)\n      expect(hash['build'].key?('idle_time')).to eq(true)\n    end\n  end\nend\n"
  },
  {
    "path": "spec/models/repository_observer_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe RepositoryObserver do\n  subject { RepositoryObserver.instance }\n  let(:repository) { FactoryBot.create(:repository, :url => \"git@git.example.com:square/web.git\") }\n\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      git.example.com:\n        type: github\n    YAML\n    stub_const \"Settings\", settings\n\n    allow(subject).to receive(:should_contact_github?).and_return(true)\n  end\n\n  it \"creates the hook if enabled\" do\n    stub_request(:get, \"#{repository.base_api_url}/hooks\").with do |request|\n      expect(request.headers[\"Authorization\"]).to eq(\"token #{GithubRequest::OAUTH_TOKEN}\")\n      true\n    end.to_return(:body => '[]')\n    stub_request(:post, \"#{repository.base_api_url}/hooks\").with do |request|\n      expect(request.headers[\"Authorization\"]).to eq(\"token #{GithubRequest::OAUTH_TOKEN}\")\n      body = JSON.parse(request.body)\n      expect(body[\"name\"]).to eq(\"web\")\n      expect(body[\"events\"]).to eq(['pull_request'])\n      expect(body[\"active\"]).to eq(true)\n      true\n    end.to_return(:body => '[]')\n    repository.build_pull_requests = true\n    subject.after_save(repository)\n  end\nend\n"
  },
  {
    "path": "spec/models/repository_spec.rb",
    "content": "require 'spec_helper'\n\ndescribe Repository do\n  before do\n    settings = SettingsAccessor.new(<<-YAML)\n    git_servers:\n      stash.example.com:\n        type: stash\n      git.example.com:\n        type: github\n        aliases:\n          - git-alias.example.com\n      github.com:\n        type: github\n    YAML\n    stub_const \"Settings\", settings\n  end\n\n  describe '.lookup_by_url' do\n    it 'should return the Repository (straightforward)' do\n      repo = FactoryBot.create(:repository)\n      expect(Repository.lookup_by_url(repo.url)).to eq(repo)\n    end\n\n    it 'should return the Repository when a host alias is used during creation' do\n      repo = FactoryBot.create(:repository, url: \"git@git-alias.example.com:square/some-repo.git\")\n      expect(Repository.lookup_by_url(\"git@git.example.com:square/some-repo.git\")).to eq(repo)\n    end\n\n    it 'should return the Repository when a host alias is used during lookup' do\n      repo = FactoryBot.create(:repository, url: \"git@git.example.com:square/some-repo.git\")\n      expect(Repository.lookup_by_url(\"git@git-alias.example.com:square/some-repo.git\")).to eq(repo)\n    end\n\n    it 'should return nil if lookup fails' do\n      expect(\n        Repository.lookup_by_url(\"git@git-alias.example.com:square/some-repo.git\")\n      ).to be_nil\n    end\n  end\n\n  describe 'creation' do\n    it 'should extract attributes from the url' do\n      repo = Repository.new(url: \"git://git.example.com/who/what.git\")\n      expect(repo.host).to eq('git.example.com')\n      expect(repo.namespace).to eq('who')\n      expect(repo.name).to eq('what')\n    end\n\n    it 'should not allow url to trump explicit values' do\n      repo = Repository.new(name: 'explicit_name',\n                            namespace: 'explicit_namespace',\n                            host: 'git-alias.example.com')\n      repo.url = \"git://git.example.com/who/what.git\"\n      expect(repo.name).to 
eq('explicit_name')\n      expect(repo.namespace).to eq('explicit_namespace')\n      expect(repo.host).to eq('git-alias.example.com')\n    end\n  end\n\n  describe 'validations' do\n\n    context 'for url' do\n      it \"should add a error on url, if url is an an unsupported format\" do\n        repo = Repository.new(url: \"file://data/git/fun-proj.git\")\n        expect(repo).to have(1).error_on(:url)\n        expect(repo.errors_on(:url)).to include(\"is not in a format supported by Kochiku\")\n      end\n\n      it \"should add an error on url on unknown git server\" do\n        repo = Repository.new(url: \"git@example.com:who/what.git\")\n        expect(repo).to have(1).error_on(:url)\n        expect(repo.errors_on(:url)).to include(\"host is not in Kochiku's list of git servers\")\n      end\n    end\n\n    context \"when name\" do\n      context \"is set\" do\n        it \"leaves it as is\" do\n          repo = Repository.new(url: \"git://git.example.com/square/kochiku-name.git\",\n                                name: \"another_repo\")\n          repo.valid?\n          expect(repo.name).to eq(\"another_repo\")\n        end\n      end\n\n      context \"is not set when saving\" do\n        it \"sets the name based on the repository url\" do\n          repo = Repository.new(url: \"git://git.example.com/square/kochiku-name.git\")\n          repo.valid?\n          expect(repo.name).to eq(\"kochiku-name\")\n        end\n      end\n    end\n\n    context \"name\" do\n      before do\n        @repo1 = FactoryBot.create(:repository, url: \"git@git.example.com:kansas/kansas-city.git\")\n      end\n\n      it \"should allow two repositories with the same name from different namespaces\" do\n        repo2 = Repository.new(url: \"git://git.example.com/missouri/kansas-city.git\")\n        expect(repo2).to be_valid\n      end\n\n      it \"should not allow two repositories with the same name and namespaces\" do\n        repo2 = Repository.new(url: 
\"git://github.com/kansas/kansas-city.git\")\n        repo2.valid?\n        expect(repo2).to have(1).error_on(:name)\n        expect(repo2.errors.full_messages).to include(\"Namespace + Name combination already exists\")\n      end\n    end\n\n  end\n\n  context \"#interested_github_events\" do\n    it 'includes push if run_ci is enabled' do\n      expect(Repository.new(:run_ci => true).interested_github_events).to eq(['pull_request', 'push'])\n    end\n    it 'does not include push if run_ci is enabled' do\n      expect(Repository.new(:run_ci => false).interested_github_events).to eq(['pull_request'])\n    end\n  end\n\n  context \"#promotion_refs\" do\n    it \"is an empty array when promotion_refs is a empty string\" do\n      expect(Repository.new(:on_green_update => \"\").promotion_refs).to eq([])\n    end\n\n    it \"is an empty array when promotion_refs is a blank string\" do\n      expect(Repository.new(:on_green_update => \"   \").promotion_refs).to eq([])\n    end\n\n    it \"is an empty array when promotion_refs is comma\" do\n      expect(Repository.new(:on_green_update => \"  , \").promotion_refs).to eq([])\n    end\n\n    it \"splits on comma's\" do\n      expect(Repository.new(:on_green_update => \"a,b,c\").promotion_refs).to eq(%w( a b c ))\n    end\n  end\n\n  context \"#base_api_url\" do\n    it \"handles ssh urls\" do\n      repo = Repository.new(url: \"git@git.example.com:square/kochiku.git\")\n      expect(repo.base_api_url).to eq(\"https://git.example.com/api/v3/repos/square/kochiku\")\n    end\n  end\n\n  context \"#base_html_url\" do\n    it \"handles ssh urls\" do\n      repo = Repository.new(url: \"git@git.example.com:square/kochiku.git\")\n      expect(repo.base_html_url).to eq(\"https://git.example.com/square/kochiku\")\n    end\n    it \"handles http urls\" do\n      repo = Repository.new(url: \"http://git.example.com/square/kochiku.git\")\n      expect(repo.base_html_url).to eq(\"https://git.example.com/square/kochiku\")\n    end\n    
it \"handles https urls\" do\n      repo = Repository.new(url: \"https://git.example.com/square/kochiku.git\")\n      expect(repo.base_html_url).to eq(\"https://git.example.com/square/kochiku\")\n    end\n    it \"handles git read only urls\" do\n      repo = Repository.new(url: \"git://git.example.com/square/kochiku.git\")\n      expect(repo.base_html_url).to eq(\"https://git.example.com/square/kochiku\")\n    end\n  end\n\n  context \"#run_ci=\" do\n    it \"converts the checkbox to bool\" do\n      repository = FactoryBot.create(:repository)\n      repository.run_ci = \"1\"\n      repository.save\n      repository.reload\n      expect(repository.run_ci).to eq(true)\n      repository.run_ci = \"0\"\n      repository.save\n      repository.reload\n      expect(repository.run_ci).to eq(false)\n    end\n  end\n\n  context \"#build_pull_requests=\" do\n    it \"converts the checkbox to bool\" do\n      repository = FactoryBot.create(:repository)\n      repository.build_pull_requests = \"1\"\n      repository.save\n      repository.reload\n      expect(repository.build_pull_requests).to eq(true)\n      repository.build_pull_requests = \"0\"\n      repository.save\n      repository.reload\n      expect(repository.build_pull_requests).to eq(false)\n    end\n  end\n\n  it \"saves build tags\" do\n    repository = FactoryBot.create(:repository)\n    repository.on_green_update = \"1,2,3\"\n    repository.save\n    repository.reload\n    expect(repository.on_green_update).to eq(\"1,2,3\")\n  end\n\n  describe '#build_for_commit' do\n    let!(:repositoryA) { FactoryBot.create(:repository) }\n    let!(:repositoryB) { FactoryBot.create(:repository) }\n    let!(:branchA1) { FactoryBot.create(:branch, repository: repositoryA) }\n    let!(:branchB1) { FactoryBot.create(:branch, repository: repositoryB) }\n    let(:sha) { to_40('a') }\n\n    it \"should return the build associated with the repository\" do\n      buildA1 = FactoryBot.create(:build, branch_record: branchA1, ref: 
sha)\n      expect(repositoryA.build_for_commit(sha)).to eq(buildA1)\n      expect(repositoryB.build_for_commit(sha)).to be_nil\n\n      buildB1 = FactoryBot.create(:build, branch_record: branchB1, ref: sha)\n      expect(repositoryA.build_for_commit(sha)).to eq(buildA1)\n      expect(repositoryB.build_for_commit(sha)).to eq(buildB1)\n    end\n  end\n\n  describe '#ensure_build_exists' do\n    let(:repository) { FactoryBot.create(:repository) }\n    let(:branch) { FactoryBot.create(:branch, repository: repository) }\n\n    it 'creates a new build only if one does not exist' do\n      sha = to_40('abcdef')\n      build1 = repository.ensure_build_exists(sha, branch)\n      build2 = repository.ensure_build_exists(sha, branch)\n\n      expect(build1).not_to eq(nil)\n      expect(build1).to eq(build2)\n\n      expect(build1.branch_record).to eq(branch)\n      expect(build1.ref).to eq(sha)\n      expect(build1.state).to eq('partitioning')\n    end\n  end\nend\n"
  },
  {
    "path": "spec/routes_spec.rb",
    "content": "require 'spec_helper'\n\nRSpec.describe \"routes\", :type => :routing do\n\n  describe '/badge/org_name/repo_name' do\n    specify {\n      expect(get: '/badge/org_name/repo_name?branch=moonwalker').to route_to(\n        controller: \"branches\",\n        action: \"badge\",\n        repository_path: \"org_name/repo_name\",\n        branch: \"moonwalker\"\n      )\n    }\n  end\n\n  context \"branches at\" do\n    describe '/:repository_path/:id' do\n      it 'to branch show page' do\n        expect(:get => \"/org_name/repo_name/bug-fix\").to route_to(\n          :controller => \"branches\",\n          :action => \"show\",\n          :repository_path => \"org_name/repo_name\",\n          :id => \"bug-fix\"\n        )\n      end\n\n      it 'to support branch names with slashes' do\n        expect(:get => \"/org_name/repo_name/rob/bug-fix\").to route_to(\n          :controller => \"branches\",\n          :action => \"show\",\n          :repository_path => \"org_name/repo_name\",\n          :id => \"rob/bug-fix\"\n        )\n      end\n\n      it 'to support branch names with dots' do\n        expect(:get => \"/org_name/repo_name/rob.bug-fix\").to route_to(\n          :controller => \"branches\",\n          :action => \"show\",\n          :repository_path => \"org_name/repo_name\",\n          :id => \"rob.bug-fix\"\n        )\n      end\n    end\n\n    describe '/:repository_path/:id member routes' do\n      it 'to a sub page' do\n        expect(:get => \"/org_name/repo_name/bug-fix/health\").to route_to(\n          :controller => \"branches\",\n          :action => \"health\",\n          :repository_path => \"org_name/repo_name\",\n          :id => \"bug-fix\"\n        )\n      end\n\n      it 'to support branch names with slashes' do\n        expect(:get => \"/org_name/repo_name/rob/bug-fix/health\").to route_to(\n          :controller => \"branches\",\n          :action => \"health\",\n          :repository_path => \"org_name/repo_name\",\n          
:id => \"rob/bug-fix\"\n        )\n      end\n\n      it 'to support branch names with dots' do\n        expect(:get => \"/org_name/repo_name/rob.bug-fix/health\").to route_to(\n          :controller => \"branches\",\n          :action => \"health\",\n          :repository_path => \"org_name/repo_name\",\n          :id => \"rob.bug-fix\"\n        )\n      end\n    end\n  end\nend\n"
  },
  {
    "path": "spec/spec_helper.rb",
    "content": "# This file is copied to spec/ when you run 'rails generate rspec:install'\nENV[\"RAILS_ENV\"] ||= 'test'\nrequire File.expand_path(\"../../config/environment\", __FILE__)\nrequire 'rspec/rails'\nrequire 'rspec/collection_matchers'\nrequire 'webmock/rspec'\nrequire 'nokogiri'\nrequire 'factory_bot'\nrequire 'capybara/rspec'\nrequire 'git_blame'\n\ninclude ActionDispatch::TestProcess\n\nFIXTURE_PATH = Rails.root.join('spec', 'fixtures')\n\n# Requires supporting ruby files with custom matchers and macros, etc,\n# in spec/support/ and its subdirectories.\nDir[Rails.root.join(\"spec/support/**/*.rb\")].each { |f| require f }\n\n# Checks for pending migrations before tests are run.\nActiveRecord::Migration.maintain_test_schema!\n\n# Test decorators independent of ActionController\n# https://github.com/drapergem/draper#isolated-tests\nDraper::ViewContext.test_strategy :fast\n\nRSpec.configure do |config|\n  config.expect_with :rspec do |c|\n    c.syntax = :expect\n  end\n\n  config.mock_with :rspec do |mocks|\n    # Cause any verifying double instantiation for a class that does not\n    # exist to raise, protecting against incorrectly spelt names.\n    mocks.verify_doubled_constant_names = true\n  end\n\n  config.fixture_path = FIXTURE_PATH\n\n  # If you're not using ActiveRecord, or you'd prefer not to run each of your\n  # examples within a transaction, remove the following line or assign false\n  # instead of true.\n  config.use_transactional_fixtures = true\n\n  # Define which fixtures should be globally available. 
Set to :all to load everything\n  # config.global_fixtures = :all\n\n  # RSpec Rails can automatically mix in different behaviours to your tests\n  # based on their file location, for example enabling you to call `get` and\n  # `post` in specs under `spec/controllers`.\n  config.infer_spec_type_from_file_location!\n\n  # lil speed increase because we are not spawning threads in our tests\n  config.threadsafe = false\n\n  config.example_status_persistence_file_path = \"./spec/examples.txt\"\n\n  config.before :each do\n    WebMock.disable_net_connect!\n    allow(JobBase).to receive(:enqueue_in)\n\n    allow(GitBlame).to receive(:git_names_and_emails_since_last_green).and_return(\"\")\n    allow(GitBlame).to receive(:git_names_and_emails_in_branch).and_return(\"\")\n    allow(GitBlame).to receive(:changes_since_last_green).and_return([])\n    allow(GitBlame).to receive(:changes_in_branch).and_return([])\n    allow(GitBlame).to receive(:files_changed_since_last_build).and_return([])\n    allow(GitBlame).to receive(:files_changed_since_last_green).and_return([])\n    allow(GitBlame).to receive(:files_changed_in_branch).and_return([])\n\n    ActionMailer::Base.deliveries.clear\n  end\nend\n"
  },
  {
    "path": "spec/support/command_stubber.rb",
    "content": "class CommandStubber\n  include RSpec::Mocks::ExampleMethods\n\n  attr_accessor :executed_commands, :fake_command_output\n\n  def initialize\n    @executed_commands = []\n    @fake_command_output = \"fake command output\"\n\n    # Always stub to prevent executing git commands.\n    stub_capture2e\n  end\n\n  def create_stubbed_process_status(exitstatus = 0)\n    double(\n      exitstatus: exitstatus,\n      success?: exitstatus == 0\n    )\n  end\n\n  def stub_capture2e_failure(fail_on_cmd)\n    allow(Open3).to receive(:capture2e) do |*cmd|\n      # cmd is an Array in the format: [{'env' => 'variable'}, 'echo baz']\n      # where the hash with environment variables is optional\n      @executed_commands << cmd\n      exitstatus =\n        if fail_on_cmd && cmd.any? { |a| a.is_a?(String) && a.start_with?(fail_on_cmd) }\n          1\n        else\n          0\n        end\n      [@fake_command_output, create_stubbed_process_status(exitstatus)]\n    end\n  end\n\n  def stub_capture2e\n    stub_capture2e_failure(nil)\n  end\n\n  def check_cmd_executed(expected_cmd)\n    found = @executed_commands.any? do |commands|\n      commands.any? { |cmd| cmd =~ /^#{expected_cmd}.*/ }\n    end\n    raise Exception, \"Failed to find #{expected_cmd} in executed commands\" unless found\n  end\nend\n"
  },
  {
    "path": "spec/support/custom_argument_matchers.rb",
    "content": "RSpec::Matchers.define :a_string do |x|\n  match { |actual| actual.instance_of?(String) }\nend\n"
  },
  {
    "path": "spec/support/factories.rb",
    "content": "FactoryBot.define do\n  factory :branch do\n    sequence(:name) { |n| \"branch_#{n}\" }\n    association :repository\n\n    factory :convergence_branch do\n      name \"1-x-stable\"\n      convergence true\n    end\n\n    factory :master_branch do\n      name \"master\"\n      convergence true\n    end\n\n    factory :branch_on_disabled_repo do\n      association :repository, factory: :disabled_repository\n    end\n  end\n\n  factory :build do\n    state 'partitioning'\n    ref { SecureRandom.hex(20) } # 20 is the length in bytes, resulting string is twice n\n    association :branch_record, factory: :branch\n\n    factory :convergence_branch_build do\n      association :branch_record, :factory => :convergence_branch\n    end\n\n    factory :completed_build do\n      state ['failed', 'succeeded'].sample\n\n      # specify num_build_parts on the factory to create a build with more than 1 build_part\n      transient do\n        num_build_parts 1\n      end\n\n      after(:create) do |build_instance, evaluator|\n        create_list(:build_part_with_build_attempt, evaluator.num_build_parts, build_instance: build_instance)\n      end\n    end\n\n    factory :build_on_disabled_repo do\n      association :branch_record, factory: :branch_on_disabled_repo\n    end\n  end\n\n  factory :build_part do\n    association :build_instance, :factory => :build, :state => 'runnable'\n    kind :test\n    paths [\"/foo/1.test\", \"foo/baz/a.test\", \"foo/baz/b.test\"]\n    queue 'ci'\n\n    factory :build_part_with_build_attempt do\n      after(:create) do |build_part, evaluator|\n        create_list(:completed_build_attempt, 1, build_part: build_part)\n      end\n    end\n  end\n\n  factory :build_attempt do\n    build_part\n    state 'runnable'\n\n    factory :completed_build_attempt do\n      state { build_part.build_instance.state == 'succeeded' ? 
'passed' : 'failed' }\n      finished_at { Time.current }\n    end\n  end\n\n  factory :build_artifact do\n    association :build_attempt, :state => 'failed'\n    log_file File.open(FIXTURE_PATH + \"build_artifact.log\")\n\n    factory :stdout_build_artifact do\n      log_file File.open(FIXTURE_PATH + \"stdout.log\")\n    end\n  end\n\n  factory :repository do\n    sequence(:url) { |n| \"git@github.com:org_name/test-repo#{n}.git\" } # these repos do not exist on purpose\n    test_command \"script/ci worker\"\n    on_green_update 'last-green-build'\n    allows_kochiku_merges true\n    enabled true\n\n    factory :stash_repository do\n      sequence(:url) { |n| \"git@stash.example.com:bucket_name/test-repo#{n}.git\" }\n    end\n\n    factory :disabled_repository do\n      enabled false\n    end\n  end\nend\n"
  },
  {
    "path": "spec/support/git_spec_helper.rb",
    "content": "# template=/dev/null to ignore any global templatedir the developer may have\n# configured on their machine. Pipe to /dev/null to ignore the warning about\n# no template dir found.\ndef suppressed_git_init\n  `git init --template=/dev/null 2> /dev/null`\nend\n"
  },
  {
    "path": "spec/support/sha_helper.rb",
    "content": "def to_40(short)\n  multiplier = (40.0 / short.length).ceil\n  (short * multiplier).slice(0, 40)\nend\n"
  },
  {
    "path": "vendor/assets/javascripts/jquery.flot.categories.js",
    "content": "/* Flot plugin for plotting textual data or categories.\r\n\r\nCopyright (c) 2007-2013 IOLA and Ole Laursen.\r\nLicensed under the MIT license.\r\n\r\nConsider a dataset like [[\"February\", 34], [\"March\", 20], ...]. This plugin\r\nallows you to plot such a dataset directly.\r\n\r\nTo enable it, you must specify mode: \"categories\" on the axis with the textual\r\nlabels, e.g.\r\n\r\n\t$.plot(\"#placeholder\", data, { xaxis: { mode: \"categories\" } });\r\n\r\nBy default, the labels are ordered as they are met in the data series. If you\r\nneed a different ordering, you can specify \"categories\" on the axis options\r\nand list the categories there:\r\n\r\n\txaxis: {\r\n\t\tmode: \"categories\",\r\n\t\tcategories: [\"February\", \"March\", \"April\"]\r\n\t}\r\n\r\nIf you need to customize the distances between the categories, you can specify\r\n\"categories\" as an object mapping labels to values\r\n\r\n\txaxis: {\r\n\t\tmode: \"categories\",\r\n\t\tcategories: { \"February\": 1, \"March\": 3, \"April\": 4 }\r\n\t}\r\n\r\nIf you don't specify all categories, the remaining categories will be numbered\r\nfrom the max value plus 1 (with a spacing of 1 between each).\r\n\r\nInternally, the plugin works by transforming the input data through an auto-\r\ngenerated mapping where the first category becomes 0, the second 1, etc.\r\nHence, a point like [\"February\", 34] becomes [0, 34] internally in Flot (this\r\nis visible in hover and click events that return numbers rather than the\r\ncategory labels). The plugin also overrides the tick generator to spit out the\r\ncategories as ticks instead of the values.\r\n\r\nIf you need to map a value back to its label, the mapping is always accessible\r\nas \"categories\" on the axis object, e.g. 
plot.getAxes().xaxis.categories.\r\n\r\n*/\r\n\r\n(function ($) {\r\n    var options = {\r\n        xaxis: {\r\n            categories: null\r\n        },\r\n        yaxis: {\r\n            categories: null\r\n        }\r\n    };\r\n    \r\n    function processRawData(plot, series, data, datapoints) {\r\n        // if categories are enabled, we need to disable\r\n        // auto-transformation to numbers so the strings are intact\r\n        // for later processing\r\n\r\n        var xCategories = series.xaxis.options.mode == \"categories\",\r\n            yCategories = series.yaxis.options.mode == \"categories\";\r\n        \r\n        if (!(xCategories || yCategories))\r\n            return;\r\n\r\n        var format = datapoints.format;\r\n\r\n        if (!format) {\r\n            // FIXME: auto-detection should really not be defined here\r\n            var s = series;\r\n            format = [];\r\n            format.push({ x: true, number: true, required: true });\r\n            format.push({ y: true, number: true, required: true });\r\n\r\n            if (s.bars.show || (s.lines.show && s.lines.fill)) {\r\n                var autoscale = !!((s.bars.show && s.bars.zero) || (s.lines.show && s.lines.zero));\r\n                format.push({ y: true, number: true, required: false, defaultValue: 0, autoscale: autoscale });\r\n                if (s.bars.horizontal) {\r\n                    delete format[format.length - 1].y;\r\n                    format[format.length - 1].x = true;\r\n                }\r\n            }\r\n            \r\n            datapoints.format = format;\r\n        }\r\n\r\n        for (var m = 0; m < format.length; ++m) {\r\n            if (format[m].x && xCategories)\r\n                format[m].number = false;\r\n            \r\n            if (format[m].y && yCategories)\r\n                format[m].number = false;\r\n        }\r\n    }\r\n\r\n    function getNextIndex(categories) {\r\n        var index = -1;\r\n        \r\n        for 
(var v in categories)\r\n            if (categories[v] > index)\r\n                index = categories[v];\r\n\r\n        return index + 1;\r\n    }\r\n\r\n    function categoriesTickGenerator(axis) {\r\n        var res = [];\r\n        for (var label in axis.categories) {\r\n            var v = axis.categories[label];\r\n            if (v >= axis.min && v <= axis.max)\r\n                res.push([v, label]);\r\n        }\r\n\r\n        res.sort(function (a, b) { return a[0] - b[0]; });\r\n\r\n        return res;\r\n    }\r\n    \r\n    function setupCategoriesForAxis(series, axis, datapoints) {\r\n        if (series[axis].options.mode != \"categories\")\r\n            return;\r\n        \r\n        if (!series[axis].categories) {\r\n            // parse options\r\n            var c = {}, o = series[axis].options.categories || {};\r\n            if ($.isArray(o)) {\r\n                for (var i = 0; i < o.length; ++i)\r\n                    c[o[i]] = i;\r\n            }\r\n            else {\r\n                for (var v in o)\r\n                    c[v] = o[v];\r\n            }\r\n            \r\n            series[axis].categories = c;\r\n        }\r\n\r\n        // fix ticks\r\n        if (!series[axis].options.ticks)\r\n            series[axis].options.ticks = categoriesTickGenerator;\r\n\r\n        transformPointsOnAxis(datapoints, axis, series[axis].categories);\r\n    }\r\n    \r\n    function transformPointsOnAxis(datapoints, axis, categories) {\r\n        // go through the points, transforming them\r\n        var points = datapoints.points,\r\n            ps = datapoints.pointsize,\r\n            format = datapoints.format,\r\n            formatColumn = axis.charAt(0),\r\n            index = getNextIndex(categories);\r\n\r\n        for (var i = 0; i < points.length; i += ps) {\r\n            if (points[i] == null)\r\n                continue;\r\n            \r\n            for (var m = 0; m < ps; ++m) {\r\n                var val = points[i + m];\r\n\r\n    
            if (val == null || !format[m][formatColumn])\r\n                    continue;\r\n\r\n                if (!(val in categories)) {\r\n                    categories[val] = index;\r\n                    ++index;\r\n                }\r\n                \r\n                points[i + m] = categories[val];\r\n            }\r\n        }\r\n    }\r\n\r\n    function processDatapoints(plot, series, datapoints) {\r\n        setupCategoriesForAxis(series, \"xaxis\", datapoints);\r\n        setupCategoriesForAxis(series, \"yaxis\", datapoints);\r\n    }\r\n\r\n    function init(plot) {\r\n        plot.hooks.processRawData.push(processRawData);\r\n        plot.hooks.processDatapoints.push(processDatapoints);\r\n    }\r\n    \r\n    $.plot.plugins.push({\r\n        init: init,\r\n        options: options,\r\n        name: 'categories',\r\n        version: '1.0'\r\n    });\r\n})(jQuery);\r\n"
  },
  {
    "path": "vendor/assets/javascripts/jquery.flot.errorbars.js",
    "content": "/* Flot plugin for plotting error bars.\r\n\r\nCopyright (c) 2007-2013 IOLA and Ole Laursen.\r\nLicensed under the MIT license.\r\n\r\nError bars are used to show standard deviation and other statistical\r\nproperties in a plot.\r\n\r\n* Created by Rui Pereira  -  rui (dot) pereira (at) gmail (dot) com\r\n\r\nThis plugin allows you to plot error-bars over points. Set \"errorbars\" inside\r\nthe points series to the axis name over which there will be error values in\r\nyour data array (*even* if you do not intend to plot them later, by setting\r\n\"show: null\" on xerr/yerr).\r\n\r\nThe plugin supports these options:\r\n\r\n\tseries: {\r\n\t\tpoints: {\r\n\t\t\terrorbars: \"x\" or \"y\" or \"xy\",\r\n\t\t\txerr: {\r\n\t\t\t\tshow: null/false or true,\r\n\t\t\t\tasymmetric: null/false or true,\r\n\t\t\t\tupperCap: null or \"-\" or function,\r\n\t\t\t\tlowerCap: null or \"-\" or function,\r\n\t\t\t\tcolor: null or color,\r\n\t\t\t\tradius: null or number\r\n\t\t\t},\r\n\t\t\tyerr: { same options as xerr }\r\n\t\t}\r\n\t}\r\n\r\nEach data point array is expected to be of the type:\r\n\r\n\t\"x\"  [ x, y, xerr ]\r\n\t\"y\"  [ x, y, yerr ]\r\n\t\"xy\" [ x, y, xerr, yerr ]\r\n\r\nWhere xerr becomes xerr_lower,xerr_upper for the asymmetric error case, and\r\nequivalently for yerr. Eg., a datapoint for the \"xy\" case with symmetric\r\nerror-bars on X and asymmetric on Y would be:\r\n\r\n\t[ x, y, xerr, yerr_lower, yerr_upper ]\r\n\r\nBy default no end caps are drawn. Setting upperCap and/or lowerCap to \"-\" will\r\ndraw a small cap perpendicular to the error bar. 
They can also be set to a\r\nuser-defined drawing function, with (ctx, x, y, radius) as parameters, as eg.\r\n\r\n\tfunction drawSemiCircle( ctx, x, y, radius ) {\r\n\t\tctx.beginPath();\r\n\t\tctx.arc( x, y, radius, 0, Math.PI, false );\r\n\t\tctx.moveTo( x - radius, y );\r\n\t\tctx.lineTo( x + radius, y );\r\n\t\tctx.stroke();\r\n\t}\r\n\r\nColor and radius both default to the same ones of the points series if not\r\nset. The independent radius parameter on xerr/yerr is useful for the case when\r\nwe may want to add error-bars to a line, without showing the interconnecting\r\npoints (with radius: 0), and still showing end caps on the error-bars.\r\nshadowSize and lineWidth are derived as well from the points series.\r\n\r\n*/\r\n\r\n(function ($) {\r\n    var options = {\r\n        series: {\r\n            points: {\r\n                errorbars: null, //should be 'x', 'y' or 'xy'\r\n                xerr: { err: 'x', show: null, asymmetric: null, upperCap: null, lowerCap: null, color: null, radius: null},\r\n                yerr: { err: 'y', show: null, asymmetric: null, upperCap: null, lowerCap: null, color: null, radius: null}\r\n            }\r\n        }\r\n    };\r\n\r\n    function processRawData(plot, series, data, datapoints){\r\n        if (!series.points.errorbars)\r\n            return;\r\n\r\n        // x,y values\r\n        var format = [\r\n            { x: true, number: true, required: true },\r\n            { y: true, number: true, required: true }\r\n        ];\r\n\r\n        var errors = series.points.errorbars;\r\n        // error bars - first X then Y\r\n        if (errors == 'x' || errors == 'xy') {\r\n            // lower / upper error\r\n            if (series.points.xerr.asymmetric) {\r\n                format.push({ x: true, number: true, required: true });\r\n                format.push({ x: true, number: true, required: true });\r\n            } else\r\n                format.push({ x: true, number: true, required: true });\r\n        
}\r\n        if (errors == 'y' || errors == 'xy') {\r\n            // lower / upper error\r\n            if (series.points.yerr.asymmetric) {\r\n                format.push({ y: true, number: true, required: true });\r\n                format.push({ y: true, number: true, required: true });\r\n            } else\r\n                format.push({ y: true, number: true, required: true });\r\n        }\r\n        datapoints.format = format;\r\n    }\r\n\r\n    function parseErrors(series, i){\r\n\r\n        var points = series.datapoints.points;\r\n\r\n        // read errors from points array\r\n        var exl = null,\r\n                exu = null,\r\n                eyl = null,\r\n                eyu = null;\r\n        var xerr = series.points.xerr,\r\n                yerr = series.points.yerr;\r\n\r\n        var eb = series.points.errorbars;\r\n        // error bars - first X\r\n        if (eb == 'x' || eb == 'xy') {\r\n            if (xerr.asymmetric) {\r\n                exl = points[i + 2];\r\n                exu = points[i + 3];\r\n                if (eb == 'xy')\r\n                    if (yerr.asymmetric){\r\n                        eyl = points[i + 4];\r\n                        eyu = points[i + 5];\r\n                    } else eyl = points[i + 4];\r\n            } else {\r\n                exl = points[i + 2];\r\n                if (eb == 'xy')\r\n                    if (yerr.asymmetric) {\r\n                        eyl = points[i + 3];\r\n                        eyu = points[i + 4];\r\n                    } else eyl = points[i + 3];\r\n            }\r\n        // only Y\r\n        } else if (eb == 'y')\r\n            if (yerr.asymmetric) {\r\n                eyl = points[i + 2];\r\n                eyu = points[i + 3];\r\n            } else eyl = points[i + 2];\r\n\r\n        // symmetric errors?\r\n        if (exu == null) exu = exl;\r\n        if (eyu == null) eyu = eyl;\r\n\r\n        var errRanges = [exl, exu, eyl, eyu];\r\n        // nullify if not 
showing\r\n        if (!xerr.show){\r\n            errRanges[0] = null;\r\n            errRanges[1] = null;\r\n        }\r\n        if (!yerr.show){\r\n            errRanges[2] = null;\r\n            errRanges[3] = null;\r\n        }\r\n        return errRanges;\r\n    }\r\n\r\n    function drawSeriesErrors(plot, ctx, s){\r\n\r\n        var points = s.datapoints.points,\r\n                ps = s.datapoints.pointsize,\r\n                ax = [s.xaxis, s.yaxis],\r\n                radius = s.points.radius,\r\n                err = [s.points.xerr, s.points.yerr];\r\n\r\n        //sanity check, in case some inverted axis hack is applied to flot\r\n        var invertX = false;\r\n        if (ax[0].p2c(ax[0].max) < ax[0].p2c(ax[0].min)) {\r\n            invertX = true;\r\n            var tmp = err[0].lowerCap;\r\n            err[0].lowerCap = err[0].upperCap;\r\n            err[0].upperCap = tmp;\r\n        }\r\n\r\n        var invertY = false;\r\n        if (ax[1].p2c(ax[1].min) < ax[1].p2c(ax[1].max)) {\r\n            invertY = true;\r\n            var tmp = err[1].lowerCap;\r\n            err[1].lowerCap = err[1].upperCap;\r\n            err[1].upperCap = tmp;\r\n        }\r\n\r\n        for (var i = 0; i < s.datapoints.points.length; i += ps) {\r\n\r\n            //parse\r\n            var errRanges = parseErrors(s, i);\r\n\r\n            //cycle xerr & yerr\r\n            for (var e = 0; e < err.length; e++){\r\n\r\n                var minmax = [ax[e].min, ax[e].max];\r\n\r\n                //draw this error?\r\n                if (errRanges[e * err.length]){\r\n\r\n                    //data coordinates\r\n                    var x = points[i],\r\n                        y = points[i + 1];\r\n\r\n                    //errorbar ranges\r\n                    var upper = [x, y][e] + errRanges[e * err.length + 1],\r\n                        lower = [x, y][e] - errRanges[e * err.length];\r\n\r\n                    //points outside of the canvas\r\n                    if 
(err[e].err == 'x')\r\n                        if (y > ax[1].max || y < ax[1].min || upper < ax[0].min || lower > ax[0].max)\r\n                            continue;\r\n                    if (err[e].err == 'y')\r\n                        if (x > ax[0].max || x < ax[0].min || upper < ax[1].min || lower > ax[1].max)\r\n                            continue;\r\n\r\n                    // prevent errorbars getting out of the canvas\r\n                    var drawUpper = true,\r\n                        drawLower = true;\r\n\r\n                    if (upper > minmax[1]) {\r\n                        drawUpper = false;\r\n                        upper = minmax[1];\r\n                    }\r\n                    if (lower < minmax[0]) {\r\n                        drawLower = false;\r\n                        lower = minmax[0];\r\n                    }\r\n\r\n                    //sanity check, in case some inverted axis hack is applied to flot\r\n                    if ((err[e].err == 'x' && invertX) || (err[e].err == 'y' && invertY)) {\r\n                        //swap coordinates\r\n                        var tmp = lower;\r\n                        lower = upper;\r\n                        upper = tmp;\r\n                        tmp = drawLower;\r\n                        drawLower = drawUpper;\r\n                        drawUpper = tmp;\r\n                        tmp = minmax[0];\r\n                        minmax[0] = minmax[1];\r\n                        minmax[1] = tmp;\r\n                    }\r\n\r\n                    // convert to pixels\r\n                    x = ax[0].p2c(x),\r\n                        y = ax[1].p2c(y),\r\n                        upper = ax[e].p2c(upper);\r\n                    lower = ax[e].p2c(lower);\r\n                    minmax[0] = ax[e].p2c(minmax[0]);\r\n                    minmax[1] = ax[e].p2c(minmax[1]);\r\n\r\n                    //same style as points by default\r\n                    var lw = err[e].lineWidth ? 
err[e].lineWidth : s.points.lineWidth,\r\n                        sw = s.points.shadowSize != null ? s.points.shadowSize : s.shadowSize;\r\n\r\n                    //shadow as for points\r\n                    if (lw > 0 && sw > 0) {\r\n                        var w = sw / 2;\r\n                        ctx.lineWidth = w;\r\n                        ctx.strokeStyle = \"rgba(0,0,0,0.1)\";\r\n                        drawError(ctx, err[e], x, y, upper, lower, drawUpper, drawLower, radius, w + w/2, minmax);\r\n\r\n                        ctx.strokeStyle = \"rgba(0,0,0,0.2)\";\r\n                        drawError(ctx, err[e], x, y, upper, lower, drawUpper, drawLower, radius, w/2, minmax);\r\n                    }\r\n\r\n                    ctx.strokeStyle = err[e].color? err[e].color: s.color;\r\n                    ctx.lineWidth = lw;\r\n                    //draw it\r\n                    drawError(ctx, err[e], x, y, upper, lower, drawUpper, drawLower, radius, 0, minmax);\r\n                }\r\n            }\r\n        }\r\n    }\r\n\r\n    function drawError(ctx,err,x,y,upper,lower,drawUpper,drawLower,radius,offset,minmax){\r\n\r\n        //shadow offset\r\n        y += offset;\r\n        upper += offset;\r\n        lower += offset;\r\n\r\n        // error bar - avoid plotting over circles\r\n        if (err.err == 'x'){\r\n            if (upper > x + radius) drawPath(ctx, [[upper,y],[Math.max(x + radius,minmax[0]),y]]);\r\n            else drawUpper = false;\r\n            if (lower < x - radius) drawPath(ctx, [[Math.min(x - radius,minmax[1]),y],[lower,y]] );\r\n            else drawLower = false;\r\n        }\r\n        else {\r\n            if (upper < y - radius) drawPath(ctx, [[x,upper],[x,Math.min(y - radius,minmax[0])]] );\r\n            else drawUpper = false;\r\n            if (lower > y + radius) drawPath(ctx, [[x,Math.max(y + radius,minmax[1])],[x,lower]] );\r\n            else drawLower = false;\r\n        }\r\n\r\n        //internal radius value in 
errorbar, allows to plot radius 0 points and still keep proper sized caps\r\n        //this is a way to get errorbars on lines without visible connecting dots\r\n        radius = err.radius != null? err.radius: radius;\r\n\r\n        // upper cap\r\n        if (drawUpper) {\r\n            if (err.upperCap == '-'){\r\n                if (err.err=='x') drawPath(ctx, [[upper,y - radius],[upper,y + radius]] );\r\n                else drawPath(ctx, [[x - radius,upper],[x + radius,upper]] );\r\n            } else if ($.isFunction(err.upperCap)){\r\n                if (err.err=='x') err.upperCap(ctx, upper, y, radius);\r\n                else err.upperCap(ctx, x, upper, radius);\r\n            }\r\n        }\r\n        // lower cap\r\n        if (drawLower) {\r\n            if (err.lowerCap == '-'){\r\n                if (err.err=='x') drawPath(ctx, [[lower,y - radius],[lower,y + radius]] );\r\n                else drawPath(ctx, [[x - radius,lower],[x + radius,lower]] );\r\n            } else if ($.isFunction(err.lowerCap)){\r\n                if (err.err=='x') err.lowerCap(ctx, lower, y, radius);\r\n                else err.lowerCap(ctx, x, lower, radius);\r\n            }\r\n        }\r\n    }\r\n\r\n    function drawPath(ctx, pts){\r\n        ctx.beginPath();\r\n        ctx.moveTo(pts[0][0], pts[0][1]);\r\n        for (var p=1; p < pts.length; p++)\r\n            ctx.lineTo(pts[p][0], pts[p][1]);\r\n        ctx.stroke();\r\n    }\r\n\r\n    function draw(plot, ctx){\r\n        var plotOffset = plot.getPlotOffset();\r\n\r\n        ctx.save();\r\n        ctx.translate(plotOffset.left, plotOffset.top);\r\n        $.each(plot.getData(), function (i, s) {\r\n            if (s.points.errorbars && (s.points.xerr.show || s.points.yerr.show))\r\n                drawSeriesErrors(plot, ctx, s);\r\n        });\r\n        ctx.restore();\r\n    }\r\n\r\n    function init(plot) {\r\n        plot.hooks.processRawData.push(processRawData);\r\n        plot.hooks.draw.push(draw);\r\n    
}\r\n\r\n    $.plot.plugins.push({\r\n                init: init,\r\n                options: options,\r\n                name: 'errorbars',\r\n                version: '1.0'\r\n            });\r\n})(jQuery);\r\n"
  },
  {
    "path": "vendor/assets/javascripts/jquery.flot.js",
    "content": "/* Javascript plotting library for jQuery, version 0.8.0.\r\n\r\nCopyright (c) 2007-2013 IOLA and Ole Laursen.\r\nLicensed under the MIT license.\r\n\r\n*/\r\n\r\n// first an inline dependency, jquery.colorhelpers.js, we inline it here\r\n// for convenience\r\n\r\n/* Plugin for jQuery for working with colors.\r\n *\r\n * Version 1.1.\r\n *\r\n * Inspiration from jQuery color animation plugin by John Resig.\r\n *\r\n * Released under the MIT license by Ole Laursen, October 2009.\r\n *\r\n * Examples:\r\n *\r\n *   $.color.parse(\"#fff\").scale('rgb', 0.25).add('a', -0.5).toString()\r\n *   var c = $.color.extract($(\"#mydiv\"), 'background-color');\r\n *   console.log(c.r, c.g, c.b, c.a);\r\n *   $.color.make(100, 50, 25, 0.4).toString() // returns \"rgba(100,50,25,0.4)\"\r\n *\r\n * Note that .scale() and .add() return the same modified object\r\n * instead of making a new one.\r\n *\r\n * V. 1.1: Fix error handling so e.g. parsing an empty string does\r\n * produce a color rather than just crashing.\r\n */\r\n(function(B){B.color={};B.color.make=function(F,E,C,D){var G={};G.r=F||0;G.g=E||0;G.b=C||0;G.a=D!=null?D:1;G.add=function(J,I){for(var H=0;H<J.length;++H){G[J.charAt(H)]+=I}return G.normalize()};G.scale=function(J,I){for(var H=0;H<J.length;++H){G[J.charAt(H)]*=I}return G.normalize()};G.toString=function(){if(G.a>=1){return\"rgb(\"+[G.r,G.g,G.b].join(\",\")+\")\"}else{return\"rgba(\"+[G.r,G.g,G.b,G.a].join(\",\")+\")\"}};G.normalize=function(){function H(J,K,I){return K<J?J:(K>I?I:K)}G.r=H(0,parseInt(G.r),255);G.g=H(0,parseInt(G.g),255);G.b=H(0,parseInt(G.b),255);G.a=H(0,G.a,1);return G};G.clone=function(){return B.color.make(G.r,G.b,G.g,G.a)};return G.normalize()};B.color.extract=function(D,C){var E;do{E=D.css(C).toLowerCase();if(E!=\"\"&&E!=\"transparent\"){break}D=D.parent()}while(!B.nodeName(D.get(0),\"body\"));if(E==\"rgba(0, 0, 0, 0)\"){E=\"transparent\"}return B.color.parse(E)};B.color.parse=function(F){var 
E,C=B.color.make;if(E=/rgb\\(\\s*([0-9]{1,3})\\s*,\\s*([0-9]{1,3})\\s*,\\s*([0-9]{1,3})\\s*\\)/.exec(F)){return C(parseInt(E[1],10),parseInt(E[2],10),parseInt(E[3],10))}if(E=/rgba\\(\\s*([0-9]{1,3})\\s*,\\s*([0-9]{1,3})\\s*,\\s*([0-9]{1,3})\\s*,\\s*([0-9]+(?:\\.[0-9]+)?)\\s*\\)/.exec(F)){return C(parseInt(E[1],10),parseInt(E[2],10),parseInt(E[3],10),parseFloat(E[4]))}if(E=/rgb\\(\\s*([0-9]+(?:\\.[0-9]+)?)\\%\\s*,\\s*([0-9]+(?:\\.[0-9]+)?)\\%\\s*,\\s*([0-9]+(?:\\.[0-9]+)?)\\%\\s*\\)/.exec(F)){return C(parseFloat(E[1])*2.55,parseFloat(E[2])*2.55,parseFloat(E[3])*2.55)}if(E=/rgba\\(\\s*([0-9]+(?:\\.[0-9]+)?)\\%\\s*,\\s*([0-9]+(?:\\.[0-9]+)?)\\%\\s*,\\s*([0-9]+(?:\\.[0-9]+)?)\\%\\s*,\\s*([0-9]+(?:\\.[0-9]+)?)\\s*\\)/.exec(F)){return C(parseFloat(E[1])*2.55,parseFloat(E[2])*2.55,parseFloat(E[3])*2.55,parseFloat(E[4]))}if(E=/#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(F)){return C(parseInt(E[1],16),parseInt(E[2],16),parseInt(E[3],16))}if(E=/#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(F)){return C(parseInt(E[1]+E[1],16),parseInt(E[2]+E[2],16),parseInt(E[3]+E[3],16))}var D=B.trim(F).toLowerCase();if(D==\"transparent\"){return C(255,255,255,0)}else{E=A[D]||[0,0,0];return C(E[0],E[1],E[2])}};var 
A={aqua:[0,255,255],azure:[240,255,255],beige:[245,245,220],black:[0,0,0],blue:[0,0,255],brown:[165,42,42],cyan:[0,255,255],darkblue:[0,0,139],darkcyan:[0,139,139],darkgrey:[169,169,169],darkgreen:[0,100,0],darkkhaki:[189,183,107],darkmagenta:[139,0,139],darkolivegreen:[85,107,47],darkorange:[255,140,0],darkorchid:[153,50,204],darkred:[139,0,0],darksalmon:[233,150,122],darkviolet:[148,0,211],fuchsia:[255,0,255],gold:[255,215,0],green:[0,128,0],indigo:[75,0,130],khaki:[240,230,140],lightblue:[173,216,230],lightcyan:[224,255,255],lightgreen:[144,238,144],lightgrey:[211,211,211],lightpink:[255,182,193],lightyellow:[255,255,224],lime:[0,255,0],magenta:[255,0,255],maroon:[128,0,0],navy:[0,0,128],olive:[128,128,0],orange:[255,165,0],pink:[255,192,203],purple:[128,0,128],violet:[128,0,128],red:[255,0,0],silver:[192,192,192],white:[255,255,255],yellow:[255,255,0]}})(jQuery);\r\n\r\n// the actual Flot code\r\n(function($) {\r\n\r\n\t// Cache the prototype hasOwnProperty for faster access\r\n\r\n\tvar hasOwnProperty = Object.prototype.hasOwnProperty;\r\n\r\n\t///////////////////////////////////////////////////////////////////////////\r\n\t// The Canvas object is a wrapper around an HTML5 <canvas> tag.\r\n\t//\r\n\t// @constructor\r\n\t// @param {string} cls List of classes to apply to the canvas.\r\n\t// @param {element} container Element onto which to append the canvas.\r\n\t//\r\n\t// Requiring a container is a little iffy, but unfortunately canvas\r\n\t// operations don't work unless the canvas is attached to the DOM.\r\n\r\n\tfunction Canvas(cls, container) {\r\n\r\n\t\tvar element = container.children(\".\" + cls)[0];\r\n\r\n\t\tif (element == null) {\r\n\r\n\t\t\telement = document.createElement(\"canvas\");\r\n\t\t\telement.className = cls;\r\n\r\n\t\t\t$(element).css({ direction: \"ltr\", position: \"absolute\", left: 0, top: 0 })\r\n\t\t\t\t.appendTo(container);\r\n\r\n\t\t\t// If HTML5 Canvas isn't available, fall back to [Ex|Flash]canvas\r\n\r\n\t\t\tif 
(!element.getContext) {\r\n\t\t\t\tif (window.G_vmlCanvasManager) {\r\n\t\t\t\t\telement = window.G_vmlCanvasManager.initElement(element);\r\n\t\t\t\t} else {\r\n\t\t\t\t\tthrow new Error(\"Canvas is not available. If you're using IE with a fall-back such as Excanvas, then there's either a mistake in your conditional include, or the page has no DOCTYPE and is rendering in Quirks Mode.\");\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tthis.element = element;\r\n\r\n\t\tvar context = this.context = element.getContext(\"2d\");\r\n\r\n\t\t// Determine the screen's ratio of physical to device-independent\r\n\t\t// pixels.  This is the ratio between the canvas width that the browser\r\n\t\t// advertises and the number of pixels actually present in that space.\r\n\r\n\t\t// The iPhone 4, for example, has a device-independent width of 320px,\r\n\t\t// but its screen is actually 640px wide.  It therefore has a pixel\r\n\t\t// ratio of 2, while most normal devices have a ratio of 1.\r\n\r\n\t\tvar devicePixelRatio = window.devicePixelRatio || 1,\r\n\t\t\tbackingStoreRatio =\r\n\t\t\t\tcontext.webkitBackingStorePixelRatio ||\r\n\t\t\t\tcontext.mozBackingStorePixelRatio ||\r\n\t\t\t\tcontext.msBackingStorePixelRatio ||\r\n\t\t\t\tcontext.oBackingStorePixelRatio ||\r\n\t\t\t\tcontext.backingStorePixelRatio || 1;\r\n\r\n\t\tthis.pixelRatio = devicePixelRatio / backingStoreRatio;\r\n\r\n\t\t// Size the canvas to match the internal dimensions of its container\r\n\r\n\t\tthis.resize(container.width(), container.height());\r\n\r\n\t\t// Collection of HTML div layers for text overlaid onto the canvas\r\n\r\n\t\tthis.textContainer = null;\r\n\t\tthis.text = {};\r\n\r\n\t\t// Cache of text fragments and metrics, so we can avoid expensively\r\n\t\t// re-calculating them when the plot is re-rendered in a loop.\r\n\r\n\t\tthis._textCache = {};\r\n\t}\r\n\r\n\t// Resizes the canvas to the given dimensions.\r\n\t//\r\n\t// @param {number} width New width of the canvas, in pixels.\r\n\t// 
@param {number} width New height of the canvas, in pixels.\r\n\r\n\tCanvas.prototype.resize = function(width, height) {\r\n\r\n\t\tif (width <= 0 || height <= 0) {\r\n\t\t\tthrow new Error(\"Invalid dimensions for plot, width = \" + width + \", height = \" + height);\r\n\t\t}\r\n\r\n\t\tvar element = this.element,\r\n\t\t\tcontext = this.context,\r\n\t\t\tpixelRatio = this.pixelRatio;\r\n\r\n\t\t// Resize the canvas, increasing its density based on the display's\r\n\t\t// pixel ratio; basically giving it more pixels without increasing the\r\n\t\t// size of its element, to take advantage of the fact that retina\r\n\t\t// displays have that many more pixels in the same advertised space.\r\n\r\n\t\t// Resizing should reset the state (excanvas seems to be buggy though)\r\n\r\n\t\tif (this.width != width) {\r\n\t\t\telement.width = width * pixelRatio;\r\n\t\t\telement.style.width = width + \"px\";\r\n\t\t\tthis.width = width;\r\n\t\t}\r\n\r\n\t\tif (this.height != height) {\r\n\t\t\telement.height = height * pixelRatio;\r\n\t\t\telement.style.height = height + \"px\";\r\n\t\t\tthis.height = height;\r\n\t\t}\r\n\r\n\t\t// Save the context, so we can reset in case we get replotted.  
The\r\n\t\t// restore ensure that we're really back at the initial state, and\r\n\t\t// should be safe even if we haven't saved the initial state yet.\r\n\r\n\t\tcontext.restore();\r\n\t\tcontext.save();\r\n\r\n\t\t// Scale the coordinate space to match the display density; so even though we\r\n\t\t// may have twice as many pixels, we still want lines and other drawing to\r\n\t\t// appear at the same size; the extra pixels will just make them crisper.\r\n\r\n\t\tcontext.scale(pixelRatio, pixelRatio);\r\n\t};\r\n\r\n\t// Clears the entire canvas area, not including any overlaid HTML text\r\n\r\n\tCanvas.prototype.clear = function() {\r\n\t\tthis.context.clearRect(0, 0, this.width, this.height);\r\n\t};\r\n\r\n\t// Finishes rendering the canvas, including managing the text overlay.\r\n\r\n\tCanvas.prototype.render = function() {\r\n\r\n\t\tvar cache = this._textCache;\r\n\r\n\t\t// For each text layer, add elements marked as active that haven't\r\n\t\t// already been rendered, and remove those that are no longer active.\r\n\r\n\t\tfor (var layerKey in cache) {\r\n\t\t\tif (hasOwnProperty.call(cache, layerKey)) {\r\n\r\n\t\t\t\tvar layer = this.getTextLayer(layerKey),\r\n\t\t\t\t\tlayerCache = cache[layerKey];\r\n\r\n\t\t\t\tlayer.hide();\r\n\r\n\t\t\t\tfor (var styleKey in layerCache) {\r\n\t\t\t\t\tif (hasOwnProperty.call(layerCache, styleKey)) {\r\n\t\t\t\t\t\tvar styleCache = layerCache[styleKey];\r\n\t\t\t\t\t\tfor (var key in styleCache) {\r\n\t\t\t\t\t\t\tif (hasOwnProperty.call(styleCache, key)) {\r\n\t\t\t\t\t\t\t\tvar info = styleCache[key];\r\n\t\t\t\t\t\t\t\tif (info.active) {\r\n\t\t\t\t\t\t\t\t\tif (!info.rendered) {\r\n\t\t\t\t\t\t\t\t\t\tlayer.append(info.element);\r\n\t\t\t\t\t\t\t\t\t\tinfo.rendered = true;\r\n\t\t\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t\t\t} else {\r\n\t\t\t\t\t\t\t\t\tdelete styleCache[key];\r\n\t\t\t\t\t\t\t\t\tif (info.rendered) 
{\r\n\t\t\t\t\t\t\t\t\t\tinfo.element.detach();\r\n\t\t\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlayer.show();\r\n\t\t\t}\r\n\t\t}\r\n\t};\r\n\r\n\t// Creates (if necessary) and returns the text overlay container.\r\n\t//\r\n\t// @param {string} classes String of space-separated CSS classes used to\r\n\t//     uniquely identify the text layer.\r\n\t// @return {object} The jQuery-wrapped text-layer div.\r\n\r\n\tCanvas.prototype.getTextLayer = function(classes) {\r\n\r\n\t\tvar layer = this.text[classes];\r\n\r\n\t\t// Create the text layer if it doesn't exist\r\n\r\n\t\tif (layer == null) {\r\n\r\n\t\t\t// Create the text layer container, if it doesn't exist\r\n\r\n\t\t\tif (this.textContainer == null) {\r\n\t\t\t\tthis.textContainer = $(\"<div class='flot-text'></div>\")\r\n\t\t\t\t\t.css({\r\n\t\t\t\t\t\tposition: \"absolute\",\r\n\t\t\t\t\t\ttop: 0,\r\n\t\t\t\t\t\tleft: 0,\r\n\t\t\t\t\t\tbottom: 0,\r\n\t\t\t\t\t\tright: 0,\r\n\t\t\t\t\t\t'font-size': \"smaller\",\r\n\t\t\t\t\t\tcolor: \"#545454\"\r\n\t\t\t\t\t})\r\n\t\t\t\t\t.insertAfter(this.element);\r\n\t\t\t}\r\n\r\n\t\t\tlayer = this.text[classes] = $(\"<div></div>\")\r\n\t\t\t\t.addClass(classes)\r\n\t\t\t\t.css({\r\n\t\t\t\t\tposition: \"absolute\",\r\n\t\t\t\t\ttop: 0,\r\n\t\t\t\t\tleft: 0,\r\n\t\t\t\t\tbottom: 0,\r\n\t\t\t\t\tright: 0\r\n\t\t\t\t})\r\n\t\t\t\t.appendTo(this.textContainer);\r\n\t\t}\r\n\r\n\t\treturn layer;\r\n\t};\r\n\r\n\t// Creates (if necessary) and returns a text info object.\r\n\t//\r\n\t// The object looks like this:\r\n\t//\r\n\t// {\r\n\t//     width: Width of the text's wrapper div.\r\n\t//     height: Height of the text's wrapper div.\r\n\t//     active: Flag indicating whether the text should be visible.\r\n\t//     rendered: Flag indicating whether the text is currently visible.\r\n\t//     element: The jQuery-wrapped HTML div containing the text.\r\n\t// }\r\n\t//\r\n\t// Canvas maintains a cache of 
recently-used text info objects; getTextInfo\r\n\t// either returns the cached element or creates a new entry.\r\n\t//\r\n\t// @param {string} layer A string of space-separated CSS classes uniquely\r\n\t//     identifying the layer containing this text.\r\n\t// @param {string} text Text string to retrieve info for.\r\n\t// @param {(string|object)=} font Either a string of space-separated CSS\r\n\t//     classes or a font-spec object, defining the text's font and style.\r\n\t// @param {number=} angle Angle at which to rotate the text, in degrees.\r\n\t//     Angle is currently unused, it will be implemented in the future.\r\n\t// @return {object} a text info object.\r\n\r\n\tCanvas.prototype.getTextInfo = function(layer, text, font, angle) {\r\n\r\n\t\tvar textStyle, layerCache, styleCache, info;\r\n\r\n\t\t// Cast the value to a string, in case we were given a number or such\r\n\r\n\t\ttext = \"\" + text;\r\n\r\n\t\t// If the font is a font-spec object, generate a CSS font definition\r\n\r\n\t\tif (typeof font === \"object\") {\r\n\t\t\ttextStyle = font.style + \" \" + font.variant + \" \" + font.weight + \" \" + font.size + \"px/\" + font.lineHeight + \"px \" + font.family;\r\n\t\t} else {\r\n\t\t\ttextStyle = font;\r\n\t\t}\r\n\r\n\t\t// Retrieve (or create) the cache for the text's layer and styles\r\n\r\n\t\tlayerCache = this._textCache[layer];\r\n\r\n\t\tif (layerCache == null) {\r\n\t\t\tlayerCache = this._textCache[layer] = {};\r\n\t\t}\r\n\r\n\t\tstyleCache = layerCache[textStyle];\r\n\r\n\t\tif (styleCache == null) {\r\n\t\t\tstyleCache = layerCache[textStyle] = {};\r\n\t\t}\r\n\r\n\t\tinfo = styleCache[text];\r\n\r\n\t\t// If we can't find a matching element in our cache, create a new one\r\n\r\n\t\tif (info == null) {\r\n\r\n\t\t\tvar element = $(\"<div></div>\").html(text)\r\n\t\t\t\t.css({\r\n\t\t\t\t\tposition: \"absolute\",\r\n\t\t\t\t\ttop: -9999\r\n\t\t\t\t})\r\n\t\t\t\t.appendTo(this.getTextLayer(layer));\r\n\r\n\t\t\tif (typeof font === 
\"object\") {\r\n\t\t\t\telement.css({\r\n\t\t\t\t\tfont: textStyle,\r\n\t\t\t\t\tcolor: font.color\r\n\t\t\t\t});\r\n\t\t\t} else if (typeof font === \"string\") {\r\n\t\t\t\telement.addClass(font);\r\n\t\t\t}\r\n\r\n\t\t\tinfo = styleCache[text] = {\r\n\t\t\t\tactive: false,\r\n\t\t\t\trendered: false,\r\n\t\t\t\telement: element,\r\n\t\t\t\twidth: element.outerWidth(true),\r\n\t\t\t\theight: element.outerHeight(true)\r\n\t\t\t};\r\n\r\n\t\t\telement.detach();\r\n\t\t}\r\n\r\n\t\treturn info;\r\n\t};\r\n\r\n\t// Adds a text string to the canvas text overlay.\r\n\t//\r\n\t// The text isn't drawn immediately; it is marked as rendering, which will\r\n\t// result in its addition to the canvas on the next render pass.\r\n\t//\r\n\t// @param {string} layer A string of space-separated CSS classes uniquely\r\n\t//     identifying the layer containing this text.\r\n\t// @param {number} x X coordinate at which to draw the text.\r\n\t// @param {number} y Y coordinate at which to draw the text.\r\n\t// @param {string} text Text string to draw.\r\n\t// @param {(string|object)=} font Either a string of space-separated CSS\r\n\t//     classes or a font-spec object, defining the text's font and style.\r\n\t// @param {number=} angle Angle at which to rotate the text, in degrees.\r\n\t//     Angle is currently unused, it will be implemented in the future.\r\n\t// @param {string=} halign Horizontal alignment of the text; either \"left\",\r\n\t//     \"center\" or \"right\".\r\n\t// @param {string=} valign Vertical alignment of the text; either \"top\",\r\n\t//     \"middle\" or \"bottom\".\r\n\r\n\tCanvas.prototype.addText = function(layer, x, y, text, font, angle, halign, valign) {\r\n\r\n\t\tvar info = this.getTextInfo(layer, text, font, angle);\r\n\r\n\t\t// Mark the div for inclusion in the next render pass\r\n\r\n\t\tinfo.active = true;\r\n\r\n\t\t// Tweak the div's position to match the text's alignment\r\n\r\n\t\tif (halign == \"center\") {\r\n\t\t\tx -= info.width / 
2;\r\n\t\t} else if (halign == \"right\") {\r\n\t\t\tx -= info.width;\r\n\t\t}\r\n\r\n\t\tif (valign == \"middle\") {\r\n\t\t\ty -= info.height / 2;\r\n\t\t} else if (valign == \"bottom\") {\r\n\t\t\ty -= info.height;\r\n\t\t}\r\n\r\n\t\t// Move the element to its final position within the container\r\n\r\n\t\tinfo.element.css({\r\n\t\t\ttop: Math.round(y),\r\n\t\t\tleft: Math.round(x)\r\n\t\t});\r\n\t};\r\n\r\n\t// Removes one or more text strings from the canvas text overlay.\r\n\t//\r\n\t// If no parameters are given, all text within the layer is removed.\r\n\t// The text is not actually removed; it is simply marked as inactive, which\r\n\t// will result in its removal on the next render pass.\r\n\t//\r\n\t// @param {string} layer A string of space-separated CSS classes uniquely\r\n\t//     identifying the layer containing this text.\r\n\t// @param {string} text Text string to remove.\r\n\t// @param {(string|object)=} font Either a string of space-separated CSS\r\n\t//     classes or a font-spec object, defining the text's font and style.\r\n\t// @param {number=} angle Angle at which the text is rotated, in degrees.\r\n\t//     Angle is currently unused, it will be implemented in the future.\r\n\r\n\tCanvas.prototype.removeText = function(layer, text, font, angle) {\r\n\t\tif (text == null) {\r\n\t\t\tvar layerCache = this._textCache[layer];\r\n\t\t\tif (layerCache != null) {\r\n\t\t\t\tfor (var styleKey in layerCache) {\r\n\t\t\t\t\tif (hasOwnProperty.call(layerCache, styleKey)) {\r\n\t\t\t\t\t\tvar styleCache = layerCache[styleKey]\r\n\t\t\t\t\t\tfor (var key in styleCache) {\r\n\t\t\t\t\t\t\tif (hasOwnProperty.call(styleCache, key)) {\r\n\t\t\t\t\t\t\t\tstyleCache[key].active = false;\r\n\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t} else {\r\n\t\t\tthis.getTextInfo(layer, text, font, angle).active = false;\r\n\t\t}\r\n\t};\r\n\r\n\t///////////////////////////////////////////////////////////////////////////\r\n\t// The 
top-level container for the entire plot.\r\n\r\n    function Plot(placeholder, data_, options_, plugins) {\r\n        // data is on the form:\r\n        //   [ series1, series2 ... ]\r\n        // where series is either just the data as [ [x1, y1], [x2, y2], ... ]\r\n        // or { data: [ [x1, y1], [x2, y2], ... ], label: \"some label\", ... }\r\n\r\n        var series = [],\r\n            options = {\r\n                // the color theme used for graphs\r\n                colors: [\"#edc240\", \"#afd8f8\", \"#cb4b4b\", \"#4da74d\", \"#9440ed\"],\r\n                legend: {\r\n                    show: true,\r\n                    noColumns: 1, // number of colums in legend table\r\n                    labelFormatter: null, // fn: string -> string\r\n                    labelBoxBorderColor: \"#ccc\", // border color for the little label boxes\r\n                    container: null, // container (as jQuery object) to put legend in, null means default on top of graph\r\n                    position: \"ne\", // position of default legend container within plot\r\n                    margin: 5, // distance from grid edge to default legend container within plot\r\n                    backgroundColor: null, // null means auto-detect\r\n                    backgroundOpacity: 0.85, // set to 0 to avoid background\r\n                    sorted: null    // default to no legend sorting\r\n                },\r\n                xaxis: {\r\n                    show: null, // null = auto-detect, true = always, false = never\r\n                    position: \"bottom\", // or \"top\"\r\n                    mode: null, // null or \"time\"\r\n                    font: null, // null (derived from CSS in placeholder) or object like { size: 11, lineHeight: 13, style: \"italic\", weight: \"bold\", family: \"sans-serif\", variant: \"small-caps\" }\r\n                    color: null, // base color, labels, ticks\r\n                    tickColor: null, // possibly different color of 
ticks, e.g. \"rgba(0,0,0,0.15)\"\r\n                    transform: null, // null or f: number -> number to transform axis\r\n                    inverseTransform: null, // if transform is set, this should be the inverse function\r\n                    min: null, // min. value to show, null means set automatically\r\n                    max: null, // max. value to show, null means set automatically\r\n                    autoscaleMargin: null, // margin in % to add if auto-setting min/max\r\n                    ticks: null, // either [1, 3] or [[1, \"a\"], 3] or (fn: axis info -> ticks) or app. number of ticks for auto-ticks\r\n                    tickFormatter: null, // fn: number -> string\r\n                    labelWidth: null, // size of tick labels in pixels\r\n                    labelHeight: null,\r\n                    reserveSpace: null, // whether to reserve space even if axis isn't shown\r\n                    tickLength: null, // size in pixels of ticks, or \"full\" for whole line\r\n                    alignTicksWithAxis: null, // axis number or null for no sync\r\n                    tickDecimals: null, // no. 
of decimals, null means auto\r\n                    tickSize: null, // number or [number, \"unit\"]\r\n                    minTickSize: null // number or [number, \"unit\"]\r\n                },\r\n                yaxis: {\r\n                    autoscaleMargin: 0.02,\r\n                    position: \"left\" // or \"right\"\r\n                },\r\n                xaxes: [],\r\n                yaxes: [],\r\n                series: {\r\n                    points: {\r\n                        show: false,\r\n                        radius: 3,\r\n                        lineWidth: 2, // in pixels\r\n                        fill: true,\r\n                        fillColor: \"#ffffff\",\r\n                        symbol: \"circle\" // or callback\r\n                    },\r\n                    lines: {\r\n                        // we don't put in show: false so we can see\r\n                        // whether lines were actively disabled\r\n                        lineWidth: 2, // in pixels\r\n                        fill: false,\r\n                        fillColor: null,\r\n                        steps: false\r\n                        // Omit 'zero', so we can later default its value to\r\n                        // match that of the 'fill' option.\r\n                    },\r\n                    bars: {\r\n                        show: false,\r\n                        lineWidth: 2, // in pixels\r\n                        barWidth: 1, // in units of the x axis\r\n                        fill: true,\r\n                        fillColor: null,\r\n                        align: \"left\", // \"left\", \"right\", or \"center\"\r\n                        horizontal: false,\r\n                        zero: true\r\n                    },\r\n                    shadowSize: 3,\r\n                    highlightColor: null\r\n                },\r\n                grid: {\r\n                    show: true,\r\n                    aboveData: false,\r\n                    
color: \"#545454\", // primary color used for outline and labels\r\n                    backgroundColor: null, // null for transparent, else color\r\n                    borderColor: null, // set if different from the grid color\r\n                    tickColor: null, // color for the ticks, e.g. \"rgba(0,0,0,0.15)\"\r\n                    margin: 0, // distance from the canvas edge to the grid\r\n                    labelMargin: 5, // in pixels\r\n                    axisMargin: 8, // in pixels\r\n                    borderWidth: 2, // in pixels\r\n                    minBorderMargin: null, // in pixels, null means taken from points radius\r\n                    markings: null, // array of ranges or fn: axes -> array of ranges\r\n                    markingsColor: \"#f4f4f4\",\r\n                    markingsLineWidth: 2,\r\n                    // interactive stuff\r\n                    clickable: false,\r\n                    hoverable: false,\r\n                    autoHighlight: true, // highlight in case mouse is near\r\n                    mouseActiveRadius: 10 // how far the mouse can be away to activate an item\r\n                },\r\n                interaction: {\r\n                    redrawOverlayInterval: 1000/60 // time between updates, -1 means in same flow\r\n                },\r\n                hooks: {}\r\n            },\r\n        surface = null,     // the canvas for the plot itself\r\n        overlay = null,     // canvas for interactive stuff on top of plot\r\n        eventHolder = null, // jQuery object that events should be bound to\r\n        ctx = null, octx = null,\r\n        xaxes = [], yaxes = [],\r\n        plotOffset = { left: 0, right: 0, top: 0, bottom: 0},\r\n        plotWidth = 0, plotHeight = 0,\r\n        hooks = {\r\n            processOptions: [],\r\n            processRawData: [],\r\n            processDatapoints: [],\r\n            processOffset: [],\r\n            drawBackground: [],\r\n            drawSeries: [],\r\n     
       draw: [],\r\n            bindEvents: [],\r\n            drawOverlay: [],\r\n            shutdown: []\r\n        },\r\n        plot = this;\r\n\r\n        // public functions\r\n        plot.setData = setData;\r\n        plot.setupGrid = setupGrid;\r\n        plot.draw = draw;\r\n        plot.getPlaceholder = function() { return placeholder; };\r\n        plot.getCanvas = function() { return surface.element; };\r\n        plot.getPlotOffset = function() { return plotOffset; };\r\n        plot.width = function () { return plotWidth; };\r\n        plot.height = function () { return plotHeight; };\r\n        plot.offset = function () {\r\n            var o = eventHolder.offset();\r\n            o.left += plotOffset.left;\r\n            o.top += plotOffset.top;\r\n            return o;\r\n        };\r\n        plot.getData = function () { return series; };\r\n        plot.getAxes = function () {\r\n            var res = {}, i;\r\n            $.each(xaxes.concat(yaxes), function (_, axis) {\r\n                if (axis)\r\n                    res[axis.direction + (axis.n != 1 ? 
axis.n : \"\") + \"axis\"] = axis;\r\n            });\r\n            return res;\r\n        };\r\n        plot.getXAxes = function () { return xaxes; };\r\n        plot.getYAxes = function () { return yaxes; };\r\n        plot.c2p = canvasToAxisCoords;\r\n        plot.p2c = axisToCanvasCoords;\r\n        plot.getOptions = function () { return options; };\r\n        plot.highlight = highlight;\r\n        plot.unhighlight = unhighlight;\r\n        plot.triggerRedrawOverlay = triggerRedrawOverlay;\r\n        plot.pointOffset = function(point) {\r\n            return {\r\n                left: parseInt(xaxes[axisNumber(point, \"x\") - 1].p2c(+point.x) + plotOffset.left, 10),\r\n                top: parseInt(yaxes[axisNumber(point, \"y\") - 1].p2c(+point.y) + plotOffset.top, 10)\r\n            };\r\n        };\r\n        plot.shutdown = shutdown;\r\n        plot.resize = function () {\r\n        \tvar width = placeholder.width(),\r\n        \t\theight = placeholder.height();\r\n            surface.resize(width, height);\r\n            overlay.resize(width, height);\r\n        };\r\n\r\n        // public attributes\r\n        plot.hooks = hooks;\r\n\r\n        // initialize\r\n        initPlugins(plot);\r\n        parseOptions(options_);\r\n        setupCanvases();\r\n        setData(data_);\r\n        setupGrid();\r\n        draw();\r\n        bindEvents();\r\n\r\n\r\n        function executeHooks(hook, args) {\r\n            args = [plot].concat(args);\r\n            for (var i = 0; i < hook.length; ++i)\r\n                hook[i].apply(this, args);\r\n        }\r\n\r\n        function initPlugins() {\r\n\r\n            // References to key classes, allowing plugins to modify them\r\n\r\n            var classes = {\r\n                Canvas: Canvas\r\n            };\r\n\r\n            for (var i = 0; i < plugins.length; ++i) {\r\n                var p = plugins[i];\r\n                p.init(plot, classes);\r\n                if (p.options)\r\n                    
$.extend(true, options, p.options);\r\n            }\r\n        }\r\n\r\n        function parseOptions(opts) {\r\n\r\n            $.extend(true, options, opts);\r\n\r\n            if (options.xaxis.color == null)\r\n                options.xaxis.color = $.color.parse(options.grid.color).scale('a', 0.22).toString();\r\n            if (options.yaxis.color == null)\r\n                options.yaxis.color = $.color.parse(options.grid.color).scale('a', 0.22).toString();\r\n\r\n            if (options.xaxis.tickColor == null) // grid.tickColor for back-compatibility\r\n                options.xaxis.tickColor = options.grid.tickColor || options.xaxis.color;\r\n            if (options.yaxis.tickColor == null) // grid.tickColor for back-compatibility\r\n                options.yaxis.tickColor = options.grid.tickColor || options.yaxis.color;\r\n\r\n            if (options.grid.borderColor == null)\r\n                options.grid.borderColor = options.grid.color;\r\n            if (options.grid.tickColor == null)\r\n                options.grid.tickColor = $.color.parse(options.grid.color).scale('a', 0.22).toString();\r\n\r\n            // Fill in defaults for axis options, including any unspecified\r\n            // font-spec fields, if a font-spec was provided.\r\n\r\n            // If no x/y axis options were provided, create one of each anyway,\r\n            // since the rest of the code assumes that they exist.\r\n\r\n            var i, axisOptions, axisCount,\r\n                fontDefaults = {\r\n                    style: placeholder.css(\"font-style\"),\r\n                    size: Math.round(0.8 * (+placeholder.css(\"font-size\").replace(\"px\", \"\") || 13)),\r\n                    variant: placeholder.css(\"font-variant\"),\r\n                    weight: placeholder.css(\"font-weight\"),\r\n                    family: placeholder.css(\"font-family\")\r\n                };\r\n\r\n            fontDefaults.lineHeight = fontDefaults.size * 1.15;\r\n\r\n            
axisCount = options.xaxes.length || 1;\r\n            for (i = 0; i < axisCount; ++i) {\r\n\r\n                axisOptions = options.xaxes[i];\r\n                if (axisOptions && !axisOptions.tickColor) {\r\n                    axisOptions.tickColor = axisOptions.color;\r\n                }\r\n\r\n                axisOptions = $.extend(true, {}, options.xaxis, axisOptions);\r\n                options.xaxes[i] = axisOptions;\r\n\r\n                if (axisOptions.font) {\r\n                    axisOptions.font = $.extend({}, fontDefaults, axisOptions.font);\r\n                    if (!axisOptions.font.color) {\r\n                        axisOptions.font.color = axisOptions.color;\r\n                    }\r\n                }\r\n            }\r\n\r\n            axisCount = options.yaxes.length || 1;\r\n            for (i = 0; i < axisCount; ++i) {\r\n\r\n                axisOptions = options.yaxes[i];\r\n                if (axisOptions && !axisOptions.tickColor) {\r\n                    axisOptions.tickColor = axisOptions.color;\r\n                }\r\n\r\n                axisOptions = $.extend(true, {}, options.yaxis, axisOptions);\r\n                options.yaxes[i] = axisOptions;\r\n\r\n                if (axisOptions.font) {\r\n                    axisOptions.font = $.extend({}, fontDefaults, axisOptions.font);\r\n                    if (!axisOptions.font.color) {\r\n                        axisOptions.font.color = axisOptions.color;\r\n                    }\r\n                }\r\n            }\r\n\r\n            // backwards compatibility, to be removed in future\r\n            if (options.xaxis.noTicks && options.xaxis.ticks == null)\r\n                options.xaxis.ticks = options.xaxis.noTicks;\r\n            if (options.yaxis.noTicks && options.yaxis.ticks == null)\r\n                options.yaxis.ticks = options.yaxis.noTicks;\r\n            if (options.x2axis) {\r\n                options.xaxes[1] = $.extend(true, {}, options.xaxis, options.x2axis);\r\n 
               options.xaxes[1].position = \"top\";\r\n            }\r\n            if (options.y2axis) {\r\n                options.yaxes[1] = $.extend(true, {}, options.yaxis, options.y2axis);\r\n                options.yaxes[1].position = \"right\";\r\n            }\r\n            if (options.grid.coloredAreas)\r\n                options.grid.markings = options.grid.coloredAreas;\r\n            if (options.grid.coloredAreasColor)\r\n                options.grid.markingsColor = options.grid.coloredAreasColor;\r\n            if (options.lines)\r\n                $.extend(true, options.series.lines, options.lines);\r\n            if (options.points)\r\n                $.extend(true, options.series.points, options.points);\r\n            if (options.bars)\r\n                $.extend(true, options.series.bars, options.bars);\r\n            if (options.shadowSize != null)\r\n                options.series.shadowSize = options.shadowSize;\r\n            if (options.highlightColor != null)\r\n                options.series.highlightColor = options.highlightColor;\r\n\r\n            // save options on axes for future reference\r\n            for (i = 0; i < options.xaxes.length; ++i)\r\n                getOrCreateAxis(xaxes, i + 1).options = options.xaxes[i];\r\n            for (i = 0; i < options.yaxes.length; ++i)\r\n                getOrCreateAxis(yaxes, i + 1).options = options.yaxes[i];\r\n\r\n            // add hooks from options\r\n            for (var n in hooks)\r\n                if (options.hooks[n] && options.hooks[n].length)\r\n                    hooks[n] = hooks[n].concat(options.hooks[n]);\r\n\r\n            executeHooks(hooks.processOptions, [options]);\r\n        }\r\n\r\n        function setData(d) {\r\n            series = parseData(d);\r\n            fillInSeriesOptions();\r\n            processData();\r\n        }\r\n\r\n        function parseData(d) {\r\n            var res = [];\r\n            for (var i = 0; i < d.length; ++i) {\r\n               
 var s = $.extend(true, {}, options.series);\r\n\r\n                if (d[i].data != null) {\r\n                    s.data = d[i].data; // move the data instead of deep-copy\r\n                    delete d[i].data;\r\n\r\n                    $.extend(true, s, d[i]);\r\n\r\n                    d[i].data = s.data;\r\n                }\r\n                else\r\n                    s.data = d[i];\r\n                res.push(s);\r\n            }\r\n\r\n            return res;\r\n        }\r\n\r\n        function axisNumber(obj, coord) {\r\n            var a = obj[coord + \"axis\"];\r\n            if (typeof a == \"object\") // if we got a real axis, extract number\r\n                a = a.n;\r\n            if (typeof a != \"number\")\r\n                a = 1; // default to first axis\r\n            return a;\r\n        }\r\n\r\n        function allAxes() {\r\n            // return flat array without annoying null entries\r\n            return $.grep(xaxes.concat(yaxes), function (a) { return a; });\r\n        }\r\n\r\n        function canvasToAxisCoords(pos) {\r\n            // return an object with x/y corresponding to all used axes\r\n            var res = {}, i, axis;\r\n            for (i = 0; i < xaxes.length; ++i) {\r\n                axis = xaxes[i];\r\n                if (axis && axis.used)\r\n                    res[\"x\" + axis.n] = axis.c2p(pos.left);\r\n            }\r\n\r\n            for (i = 0; i < yaxes.length; ++i) {\r\n                axis = yaxes[i];\r\n                if (axis && axis.used)\r\n                    res[\"y\" + axis.n] = axis.c2p(pos.top);\r\n            }\r\n\r\n            if (res.x1 !== undefined)\r\n                res.x = res.x1;\r\n            if (res.y1 !== undefined)\r\n                res.y = res.y1;\r\n\r\n            return res;\r\n        }\r\n\r\n        function axisToCanvasCoords(pos) {\r\n            // get canvas coords from the first pair of x/y found in pos\r\n            var res = {}, i, axis, key;\r\n\r\n           
 for (i = 0; i < xaxes.length; ++i) {\r\n                axis = xaxes[i];\r\n                if (axis && axis.used) {\r\n                    key = \"x\" + axis.n;\r\n                    if (pos[key] == null && axis.n == 1)\r\n                        key = \"x\";\r\n\r\n                    if (pos[key] != null) {\r\n                        res.left = axis.p2c(pos[key]);\r\n                        break;\r\n                    }\r\n                }\r\n            }\r\n\r\n            for (i = 0; i < yaxes.length; ++i) {\r\n                axis = yaxes[i];\r\n                if (axis && axis.used) {\r\n                    key = \"y\" + axis.n;\r\n                    if (pos[key] == null && axis.n == 1)\r\n                        key = \"y\";\r\n\r\n                    if (pos[key] != null) {\r\n                        res.top = axis.p2c(pos[key]);\r\n                        break;\r\n                    }\r\n                }\r\n            }\r\n\r\n            return res;\r\n        }\r\n\r\n        function getOrCreateAxis(axes, number) {\r\n            if (!axes[number - 1])\r\n                axes[number - 1] = {\r\n                    n: number, // save the number for future reference\r\n                    direction: axes == xaxes ? \"x\" : \"y\",\r\n                    options: $.extend(true, {}, axes == xaxes ? 
options.xaxis : options.yaxis)\r\n                };\r\n\r\n            return axes[number - 1];\r\n        }\r\n\r\n        function fillInSeriesOptions() {\r\n\r\n            var neededColors = series.length, maxIndex = -1, i;\r\n\r\n            // Subtract the number of series that already have fixed colors or\r\n            // color indexes from the number that we still need to generate.\r\n\r\n            for (i = 0; i < series.length; ++i) {\r\n                var sc = series[i].color;\r\n                if (sc != null) {\r\n                    neededColors--;\r\n                    if (typeof sc == \"number\" && sc > maxIndex) {\r\n                        maxIndex = sc;\r\n                    }\r\n                }\r\n            }\r\n\r\n            // If any of the series have fixed color indexes, then we need to\r\n            // generate at least as many colors as the highest index.\r\n\r\n            if (neededColors <= maxIndex) {\r\n                neededColors = maxIndex + 1;\r\n            }\r\n\r\n            // Generate all the colors, using first the option colors and then\r\n            // variations on those colors once they're exhausted.\r\n\r\n            var c, colors = [], colorPool = options.colors,\r\n                colorPoolSize = colorPool.length, variation = 0;\r\n\r\n            for (i = 0; i < neededColors; i++) {\r\n\r\n                c = $.color.parse(colorPool[i % colorPoolSize] || \"#666\");\r\n\r\n                // Each time we exhaust the colors in the pool we adjust\r\n                // a scaling factor used to produce more variations on\r\n                // those colors. 
The factor alternates negative/positive\r\n                // to produce lighter/darker colors.\r\n\r\n                // Reset the variation after every few cycles, or else\r\n                // it will end up producing only white or black colors.\r\n\r\n                if (i % colorPoolSize == 0 && i) {\r\n                    if (variation >= 0) {\r\n                        if (variation < 0.5) {\r\n                            variation = -variation - 0.2;\r\n                        } else variation = 0;\r\n                    } else variation = -variation;\r\n                }\r\n\r\n                colors[i] = c.scale('rgb', 1 + variation);\r\n            }\r\n\r\n            // Finalize the series options, filling in their colors\r\n\r\n            var colori = 0, s;\r\n            for (i = 0; i < series.length; ++i) {\r\n                s = series[i];\r\n\r\n                // assign colors\r\n                if (s.color == null) {\r\n                    s.color = colors[colori].toString();\r\n                    ++colori;\r\n                }\r\n                else if (typeof s.color == \"number\")\r\n                    s.color = colors[s.color].toString();\r\n\r\n                // turn on lines automatically in case nothing is set\r\n                if (s.lines.show == null) {\r\n                    var v, show = true;\r\n                    for (v in s)\r\n                        if (s[v] && s[v].show) {\r\n                            show = false;\r\n                            break;\r\n                        }\r\n                    if (show)\r\n                        s.lines.show = true;\r\n                }\r\n\r\n                // If nothing was provided for lines.zero, default it to match\r\n                // lines.fill, since areas by default should extend to zero.\r\n\r\n                if (s.lines.zero == null) {\r\n                    s.lines.zero = !!s.lines.fill;\r\n                }\r\n\r\n                // setup axes\r\n              
  s.xaxis = getOrCreateAxis(xaxes, axisNumber(s, \"x\"));\r\n                s.yaxis = getOrCreateAxis(yaxes, axisNumber(s, \"y\"));\r\n            }\r\n        }\r\n\r\n        function processData() {\r\n            var topSentry = Number.POSITIVE_INFINITY,\r\n                bottomSentry = Number.NEGATIVE_INFINITY,\r\n                fakeInfinity = Number.MAX_VALUE,\r\n                i, j, k, m, length,\r\n                s, points, ps, x, y, axis, val, f, p,\r\n                data, format;\r\n\r\n            function updateAxis(axis, min, max) {\r\n                if (min < axis.datamin && min != -fakeInfinity)\r\n                    axis.datamin = min;\r\n                if (max > axis.datamax && max != fakeInfinity)\r\n                    axis.datamax = max;\r\n            }\r\n\r\n            $.each(allAxes(), function (_, axis) {\r\n                // init axis\r\n                axis.datamin = topSentry;\r\n                axis.datamax = bottomSentry;\r\n                axis.used = false;\r\n            });\r\n\r\n            for (i = 0; i < series.length; ++i) {\r\n                s = series[i];\r\n                s.datapoints = { points: [] };\r\n\r\n                executeHooks(hooks.processRawData, [ s, s.data, s.datapoints ]);\r\n            }\r\n\r\n            // first pass: clean and copy data\r\n            for (i = 0; i < series.length; ++i) {\r\n                s = series[i];\r\n\r\n                data = s.data;\r\n                format = s.datapoints.format;\r\n\r\n                if (!format) {\r\n                    format = [];\r\n                    // find out how to copy\r\n                    format.push({ x: true, number: true, required: true });\r\n                    format.push({ y: true, number: true, required: true });\r\n\r\n                    if (s.bars.show || (s.lines.show && s.lines.fill)) {\r\n                        var autoscale = !!((s.bars.show && s.bars.zero) || (s.lines.show && s.lines.zero));\r\n                   
     format.push({ y: true, number: true, required: false, defaultValue: 0, autoscale: autoscale });\r\n                        if (s.bars.horizontal) {\r\n                            delete format[format.length - 1].y;\r\n                            format[format.length - 1].x = true;\r\n                        }\r\n                    }\r\n\r\n                    s.datapoints.format = format;\r\n                }\r\n\r\n                if (s.datapoints.pointsize != null)\r\n                    continue; // already filled in\r\n\r\n                s.datapoints.pointsize = format.length;\r\n\r\n                ps = s.datapoints.pointsize;\r\n                points = s.datapoints.points;\r\n\r\n                var insertSteps = s.lines.show && s.lines.steps;\r\n                s.xaxis.used = s.yaxis.used = true;\r\n\r\n                for (j = k = 0; j < data.length; ++j, k += ps) {\r\n                    p = data[j];\r\n\r\n                    var nullify = p == null;\r\n                    if (!nullify) {\r\n                        for (m = 0; m < ps; ++m) {\r\n                            val = p[m];\r\n                            f = format[m];\r\n\r\n                            if (f) {\r\n                                if (f.number && val != null) {\r\n                                    val = +val; // convert to number\r\n                                    if (isNaN(val))\r\n                                        val = null;\r\n                                    else if (val == Infinity)\r\n                                        val = fakeInfinity;\r\n                                    else if (val == -Infinity)\r\n                                        val = -fakeInfinity;\r\n                                }\r\n\r\n                                if (val == null) {\r\n                                    if (f.required)\r\n                                        nullify = true;\r\n\r\n                                    if (f.defaultValue != null)\r\n  
                                      val = f.defaultValue;\r\n                                }\r\n                            }\r\n\r\n                            points[k + m] = val;\r\n                        }\r\n                    }\r\n\r\n                    if (nullify) {\r\n                        for (m = 0; m < ps; ++m) {\r\n                            val = points[k + m];\r\n                            if (val != null) {\r\n                                f = format[m];\r\n                                // extract min/max info\r\n                                if (f.x)\r\n                                    updateAxis(s.xaxis, val, val);\r\n                                if (f.y)\r\n                                    updateAxis(s.yaxis, val, val);\r\n                            }\r\n                            points[k + m] = null;\r\n                        }\r\n                    }\r\n                    else {\r\n                        // a little bit of line specific stuff that\r\n                        // perhaps shouldn't be here, but lacking\r\n                        // better means...\r\n                        if (insertSteps && k > 0\r\n                            && points[k - ps] != null\r\n                            && points[k - ps] != points[k]\r\n                            && points[k - ps + 1] != points[k + 1]) {\r\n                            // copy the point to make room for a middle point\r\n                            for (m = 0; m < ps; ++m)\r\n                                points[k + ps + m] = points[k + m];\r\n\r\n                            // middle point has same y\r\n                            points[k + 1] = points[k - ps + 1];\r\n\r\n                            // we've added a point, better reflect that\r\n                            k += ps;\r\n                        }\r\n                    }\r\n                }\r\n            }\r\n\r\n            // give the hooks a chance to run\r\n            for (i = 
0; i < series.length; ++i) {\r\n                s = series[i];\r\n\r\n                executeHooks(hooks.processDatapoints, [ s, s.datapoints]);\r\n            }\r\n\r\n            // second pass: find datamax/datamin for auto-scaling\r\n            for (i = 0; i < series.length; ++i) {\r\n                s = series[i];\r\n                points = s.datapoints.points,\r\n                ps = s.datapoints.pointsize;\r\n                format = s.datapoints.format;\r\n\r\n                var xmin = topSentry, ymin = topSentry,\r\n                    xmax = bottomSentry, ymax = bottomSentry;\r\n\r\n                for (j = 0; j < points.length; j += ps) {\r\n                    if (points[j] == null)\r\n                        continue;\r\n\r\n                    for (m = 0; m < ps; ++m) {\r\n                        val = points[j + m];\r\n                        f = format[m];\r\n                        if (!f || f.autoscale === false || val == fakeInfinity || val == -fakeInfinity)\r\n                            continue;\r\n\r\n                        if (f.x) {\r\n                            if (val < xmin)\r\n                                xmin = val;\r\n                            if (val > xmax)\r\n                                xmax = val;\r\n                        }\r\n                        if (f.y) {\r\n                            if (val < ymin)\r\n                                ymin = val;\r\n                            if (val > ymax)\r\n                                ymax = val;\r\n                        }\r\n                    }\r\n                }\r\n\r\n                if (s.bars.show) {\r\n                    // make sure we got room for the bar on the dancing floor\r\n                    var delta;\r\n\r\n                    switch (s.bars.align) {\r\n                        case \"left\":\r\n                            delta = 0;\r\n                            break;\r\n                        case \"right\":\r\n                            
delta = -s.bars.barWidth;\r\n                            break;\r\n                        case \"center\":\r\n                            delta = -s.bars.barWidth / 2;\r\n                            break;\r\n                        default:\r\n                            throw new Error(\"Invalid bar alignment: \" + s.bars.align);\r\n                    }\r\n\r\n                    if (s.bars.horizontal) {\r\n                        ymin += delta;\r\n                        ymax += delta + s.bars.barWidth;\r\n                    }\r\n                    else {\r\n                        xmin += delta;\r\n                        xmax += delta + s.bars.barWidth;\r\n                    }\r\n                }\r\n\r\n                updateAxis(s.xaxis, xmin, xmax);\r\n                updateAxis(s.yaxis, ymin, ymax);\r\n            }\r\n\r\n            $.each(allAxes(), function (_, axis) {\r\n                if (axis.datamin == topSentry)\r\n                    axis.datamin = null;\r\n                if (axis.datamax == bottomSentry)\r\n                    axis.datamax = null;\r\n            });\r\n        }\r\n\r\n        function setupCanvases() {\r\n\r\n            // Make sure the placeholder is clear of everything except canvases\r\n            // from a previous plot in this container that we'll try to re-use.\r\n\r\n            placeholder.css(\"padding\", 0) // padding messes up the positioning\r\n                .children(\":not(.flot-base,.flot-overlay)\").remove();\r\n\r\n            if (placeholder.css(\"position\") == 'static')\r\n                placeholder.css(\"position\", \"relative\"); // for positioning labels and overlay\r\n\r\n            surface = new Canvas(\"flot-base\", placeholder);\r\n            overlay = new Canvas(\"flot-overlay\", placeholder); // overlay canvas for interactive features\r\n\r\n            ctx = surface.context;\r\n            octx = overlay.context;\r\n\r\n            // define which element we're listening for events 
on\r\n            eventHolder = $(overlay.element).unbind();\r\n\r\n            // If we're re-using a plot object, shut down the old one\r\n\r\n            var existing = placeholder.data(\"plot\");\r\n\r\n            if (existing) {\r\n                existing.shutdown();\r\n                overlay.clear();\r\n            }\r\n\r\n            // save in case we get replotted\r\n            placeholder.data(\"plot\", plot);\r\n        }\r\n\r\n        function bindEvents() {\r\n            // bind events\r\n            if (options.grid.hoverable) {\r\n                eventHolder.mousemove(onMouseMove);\r\n\r\n                // Use bind, rather than .mouseleave, because we officially\r\n                // still support jQuery 1.2.6, which doesn't define a shortcut\r\n                // for mouseenter or mouseleave.  This was a bug/oversight that\r\n                // was fixed somewhere around 1.3.x.  We can return to using\r\n                // .mouseleave when we drop support for 1.2.6.\r\n\r\n                eventHolder.bind(\"mouseleave\", onMouseLeave);\r\n            }\r\n\r\n            if (options.grid.clickable)\r\n                eventHolder.click(onClick);\r\n\r\n            executeHooks(hooks.bindEvents, [eventHolder]);\r\n        }\r\n\r\n        function shutdown() {\r\n            if (redrawTimeout)\r\n                clearTimeout(redrawTimeout);\r\n\r\n            eventHolder.unbind(\"mousemove\", onMouseMove);\r\n            eventHolder.unbind(\"mouseleave\", onMouseLeave);\r\n            eventHolder.unbind(\"click\", onClick);\r\n\r\n            executeHooks(hooks.shutdown, [eventHolder]);\r\n        }\r\n\r\n        function setTransformationHelpers(axis) {\r\n            // set helper functions on the axis, assumes plot area\r\n            // has been computed already\r\n\r\n            function identity(x) { return x; }\r\n\r\n            var s, m, t = axis.options.transform || identity,\r\n                it = 
axis.options.inverseTransform;\r\n\r\n            // precompute how much the axis is scaling a point\r\n            // in canvas space\r\n            if (axis.direction == \"x\") {\r\n                s = axis.scale = plotWidth / Math.abs(t(axis.max) - t(axis.min));\r\n                m = Math.min(t(axis.max), t(axis.min));\r\n            }\r\n            else {\r\n                s = axis.scale = plotHeight / Math.abs(t(axis.max) - t(axis.min));\r\n                s = -s;\r\n                m = Math.max(t(axis.max), t(axis.min));\r\n            }\r\n\r\n            // data point to canvas coordinate\r\n            if (t == identity) // slight optimization\r\n                axis.p2c = function (p) { return (p - m) * s; };\r\n            else\r\n                axis.p2c = function (p) { return (t(p) - m) * s; };\r\n            // canvas coordinate to data point\r\n            if (!it)\r\n                axis.c2p = function (c) { return m + c / s; };\r\n            else\r\n                axis.c2p = function (c) { return it(m + c / s); };\r\n        }\r\n\r\n        function measureTickLabels(axis) {\r\n\r\n            var opts = axis.options, ticks = axis.ticks || [],\r\n                axisw = opts.labelWidth || 0, axish = opts.labelHeight || 0,\r\n                legacyStyles = axis.direction + \"Axis \" + axis.direction + axis.n + \"Axis\",\r\n                layer = \"flot-\" + axis.direction + \"-axis flot-\" + axis.direction + axis.n + \"-axis \" + legacyStyles,\r\n                font = opts.font || \"flot-tick-label tickLabel\";\r\n\r\n            for (var i = 0; i < ticks.length; ++i) {\r\n\r\n                var t = ticks[i];\r\n\r\n                if (!t.label)\r\n                    continue;\r\n\r\n                var info = surface.getTextInfo(layer, t.label, font);\r\n\r\n                if (opts.labelWidth == null)\r\n                    axisw = Math.max(axisw, info.width);\r\n                if (opts.labelHeight == null)\r\n                    axish 
= Math.max(axish, info.height);\r\n            }\r\n\r\n            axis.labelWidth = Math.ceil(axisw);\r\n            axis.labelHeight = Math.ceil(axish);\r\n        }\r\n\r\n        function allocateAxisBoxFirstPhase(axis) {\r\n            // find the bounding box of the axis by looking at label\r\n            // widths/heights and ticks, make room by diminishing the\r\n            // plotOffset; this first phase only looks at one\r\n            // dimension per axis, the other dimension depends on the\r\n            // other axes so will have to wait\r\n\r\n            var lw = axis.labelWidth,\r\n                lh = axis.labelHeight,\r\n                pos = axis.options.position,\r\n                tickLength = axis.options.tickLength,\r\n                axisMargin = options.grid.axisMargin,\r\n                padding = options.grid.labelMargin,\r\n                all = axis.direction == \"x\" ? xaxes : yaxes,\r\n                index, innermost;\r\n\r\n            // determine axis margin\r\n            var samePosition = $.grep(all, function (a) {\r\n                return a && a.options.position == pos && a.reserveSpace;\r\n            });\r\n            if ($.inArray(axis, samePosition) == samePosition.length - 1)\r\n                axisMargin = 0; // outermost\r\n\r\n            // determine tick length - if we're innermost, we can use \"full\"\r\n            if (tickLength == null) {\r\n                var sameDirection = $.grep(all, function (a) {\r\n                    return a && a.reserveSpace;\r\n                });\r\n\r\n                innermost = $.inArray(axis, sameDirection) == 0;\r\n                if (innermost)\r\n                    tickLength = \"full\";\r\n                else\r\n                    tickLength = 5;\r\n            }\r\n\r\n            if (!isNaN(+tickLength))\r\n                padding += +tickLength;\r\n\r\n            // compute box\r\n            if (axis.direction == \"x\") {\r\n                lh += padding;\r\n\r\n 
               if (pos == \"bottom\") {\r\n                    plotOffset.bottom += lh + axisMargin;\r\n                    axis.box = { top: surface.height - plotOffset.bottom, height: lh };\r\n                }\r\n                else {\r\n                    axis.box = { top: plotOffset.top + axisMargin, height: lh };\r\n                    plotOffset.top += lh + axisMargin;\r\n                }\r\n            }\r\n            else {\r\n                lw += padding;\r\n\r\n                if (pos == \"left\") {\r\n                    axis.box = { left: plotOffset.left + axisMargin, width: lw };\r\n                    plotOffset.left += lw + axisMargin;\r\n                }\r\n                else {\r\n                    plotOffset.right += lw + axisMargin;\r\n                    axis.box = { left: surface.width - plotOffset.right, width: lw };\r\n                }\r\n            }\r\n\r\n             // save for future reference\r\n            axis.position = pos;\r\n            axis.tickLength = tickLength;\r\n            axis.box.padding = padding;\r\n            axis.innermost = innermost;\r\n        }\r\n\r\n        function allocateAxisBoxSecondPhase(axis) {\r\n            // now that all axis boxes have been placed in one\r\n            // dimension, we can set the remaining dimension coordinates\r\n            if (axis.direction == \"x\") {\r\n                axis.box.left = plotOffset.left - axis.labelWidth / 2;\r\n                axis.box.width = surface.width - plotOffset.left - plotOffset.right + axis.labelWidth;\r\n            }\r\n            else {\r\n                axis.box.top = plotOffset.top - axis.labelHeight / 2;\r\n                axis.box.height = surface.height - plotOffset.bottom - plotOffset.top + axis.labelHeight;\r\n            }\r\n        }\r\n\r\n        function adjustLayoutForThingsStickingOut() {\r\n            // possibly adjust plot offset to ensure everything stays\r\n            // inside the canvas and isn't clipped 
off\r\n\r\n            var minMargin = options.grid.minBorderMargin,\r\n                margins = { x: 0, y: 0 }, i, axis;\r\n\r\n            // check stuff from the plot (FIXME: this should just read\r\n            // a value from the series, otherwise it's impossible to\r\n            // customize)\r\n            if (minMargin == null) {\r\n                minMargin = 0;\r\n                for (i = 0; i < series.length; ++i)\r\n                    minMargin = Math.max(minMargin, 2 * (series[i].points.radius + series[i].points.lineWidth/2));\r\n            }\r\n\r\n            margins.x = margins.y = Math.ceil(minMargin);\r\n\r\n            // check axis labels, note we don't check the actual\r\n            // labels but instead use the overall width/height to not\r\n            // jump as much around with replots\r\n            $.each(allAxes(), function (_, axis) {\r\n                var dir = axis.direction;\r\n                if (axis.reserveSpace)\r\n                    margins[dir] = Math.ceil(Math.max(margins[dir], (dir == \"x\" ? axis.labelWidth : axis.labelHeight) / 2));\r\n            });\r\n\r\n            plotOffset.left = Math.max(margins.x, plotOffset.left);\r\n            plotOffset.right = Math.max(margins.x, plotOffset.right);\r\n            plotOffset.top = Math.max(margins.y, plotOffset.top);\r\n            plotOffset.bottom = Math.max(margins.y, plotOffset.bottom);\r\n        }\r\n\r\n        function setupGrid() {\r\n            var i, axes = allAxes(), showGrid = options.grid.show;\r\n\r\n            // Initialize the plot's offset from the edge of the canvas\r\n\r\n            for (var a in plotOffset) {\r\n                var margin = options.grid.margin || 0;\r\n                plotOffset[a] = typeof margin == \"number\" ? 
margin : margin[a] || 0;\r\n            }\r\n\r\n            executeHooks(hooks.processOffset, [plotOffset]);\r\n\r\n            // If the grid is visible, add its border width to the offset\r\n\r\n            for (var a in plotOffset) {\r\n                if(typeof(options.grid.borderWidth) == \"object\") {\r\n                    plotOffset[a] += showGrid ? options.grid.borderWidth[a] : 0;\r\n                }\r\n                else {\r\n                    plotOffset[a] += showGrid ? options.grid.borderWidth : 0;\r\n                }\r\n            }\r\n\r\n            // init axes\r\n            $.each(axes, function (_, axis) {\r\n                axis.show = axis.options.show;\r\n                if (axis.show == null)\r\n                    axis.show = axis.used; // by default an axis is visible if it's got data\r\n\r\n                axis.reserveSpace = axis.show || axis.options.reserveSpace;\r\n\r\n                setRange(axis);\r\n            });\r\n\r\n            if (showGrid) {\r\n\r\n                var allocatedAxes = $.grep(axes, function (axis) { return axis.reserveSpace; });\r\n\r\n                $.each(allocatedAxes, function (_, axis) {\r\n                    // make the ticks\r\n                    setupTickGeneration(axis);\r\n                    setTicks(axis);\r\n                    snapRangeToTicks(axis, axis.ticks);\r\n                    // find labelWidth/Height for axis\r\n                    measureTickLabels(axis);\r\n                });\r\n\r\n                // with all dimensions calculated, we can compute the\r\n                // axis bounding boxes, start from the outside\r\n                // (reverse order)\r\n                for (i = allocatedAxes.length - 1; i >= 0; --i)\r\n                    allocateAxisBoxFirstPhase(allocatedAxes[i]);\r\n\r\n                // make sure we've got enough space for things that\r\n                // might stick out\r\n                adjustLayoutForThingsStickingOut();\r\n\r\n                
$.each(allocatedAxes, function (_, axis) {\r\n                    allocateAxisBoxSecondPhase(axis);\r\n                });\r\n            }\r\n\r\n            plotWidth = surface.width - plotOffset.left - plotOffset.right;\r\n            plotHeight = surface.height - plotOffset.bottom - plotOffset.top;\r\n\r\n            // now we got the proper plot dimensions, we can compute the scaling\r\n            $.each(axes, function (_, axis) {\r\n                setTransformationHelpers(axis);\r\n            });\r\n\r\n            if (showGrid) {\r\n                drawAxisLabels();\r\n            }\r\n\r\n            insertLegend();\r\n        }\r\n\r\n        function setRange(axis) {\r\n            var opts = axis.options,\r\n                min = +(opts.min != null ? opts.min : axis.datamin),\r\n                max = +(opts.max != null ? opts.max : axis.datamax),\r\n                delta = max - min;\r\n\r\n            if (delta == 0.0) {\r\n                // degenerate case\r\n                var widen = max == 0 ? 
1 : 0.01;\r\n\r\n                if (opts.min == null)\r\n                    min -= widen;\r\n                // always widen max if we couldn't widen min to ensure we\r\n                // don't fall into min == max which doesn't work\r\n                if (opts.max == null || opts.min != null)\r\n                    max += widen;\r\n            }\r\n            else {\r\n                // consider autoscaling\r\n                var margin = opts.autoscaleMargin;\r\n                if (margin != null) {\r\n                    if (opts.min == null) {\r\n                        min -= delta * margin;\r\n                        // make sure we don't go below zero if all values\r\n                        // are positive\r\n                        if (min < 0 && axis.datamin != null && axis.datamin >= 0)\r\n                            min = 0;\r\n                    }\r\n                    if (opts.max == null) {\r\n                        max += delta * margin;\r\n                        if (max > 0 && axis.datamax != null && axis.datamax <= 0)\r\n                            max = 0;\r\n                    }\r\n                }\r\n            }\r\n            axis.min = min;\r\n            axis.max = max;\r\n        }\r\n\r\n        function setupTickGeneration(axis) {\r\n            var opts = axis.options;\r\n\r\n            // estimate number of ticks\r\n            var noTicks;\r\n            if (typeof opts.ticks == \"number\" && opts.ticks > 0)\r\n                noTicks = opts.ticks;\r\n            else\r\n                // heuristic based on the model a*sqrt(x) fitted to\r\n                // some data points that seemed reasonable\r\n                noTicks = 0.3 * Math.sqrt(axis.direction == \"x\" ? 
surface.width : surface.height);\r\n\r\n            var delta = (axis.max - axis.min) / noTicks,\r\n                dec = -Math.floor(Math.log(delta) / Math.LN10),\r\n                maxDec = opts.tickDecimals;\r\n\r\n            if (maxDec != null && dec > maxDec) {\r\n                dec = maxDec;\r\n            }\r\n\r\n            var magn = Math.pow(10, -dec),\r\n                norm = delta / magn, // norm is between 1.0 and 10.0\r\n                size;\r\n\r\n            if (norm < 1.5) {\r\n                size = 1;\r\n            } else if (norm < 3) {\r\n                size = 2;\r\n                // special case for 2.5, requires an extra decimal\r\n                if (norm > 2.25 && (maxDec == null || dec + 1 <= maxDec)) {\r\n                    size = 2.5;\r\n                    ++dec;\r\n                }\r\n            } else if (norm < 7.5) {\r\n                size = 5;\r\n            } else {\r\n                size = 10;\r\n            }\r\n\r\n            size *= magn;\r\n\r\n            if (opts.minTickSize != null && size < opts.minTickSize) {\r\n                size = opts.minTickSize;\r\n            }\r\n\r\n            axis.delta = delta;\r\n            axis.tickDecimals = Math.max(0, maxDec != null ? 
maxDec : dec);\r\n            axis.tickSize = opts.tickSize || size;\r\n\r\n            // Time mode was moved to a plug-in in 0.8, but since so many people use this\r\n            // we'll add an especially friendly make sure they remembered to include it.\r\n\r\n            if (opts.mode == \"time\" && !axis.tickGenerator) {\r\n                throw new Error(\"Time mode requires the flot.time plugin.\");\r\n            }\r\n\r\n            // Flot supports base-10 axes; any other mode else is handled by a plug-in,\r\n            // like flot.time.js.\r\n\r\n            if (!axis.tickGenerator) {\r\n\r\n                axis.tickGenerator = function (axis) {\r\n\r\n                    var ticks = [],\r\n                        start = floorInBase(axis.min, axis.tickSize),\r\n                        i = 0,\r\n                        v = Number.NaN,\r\n                        prev;\r\n\r\n                    do {\r\n                        prev = v;\r\n                        v = start + i * axis.tickSize;\r\n                        ticks.push(v);\r\n                        ++i;\r\n                    } while (v < axis.max && v != prev);\r\n                    return ticks;\r\n                };\r\n\r\n\t\t\t\taxis.tickFormatter = function (value, axis) {\r\n\r\n\t\t\t\t\tvar factor = axis.tickDecimals ? Math.pow(10, axis.tickDecimals) : 1;\r\n\t\t\t\t\tvar formatted = \"\" + Math.round(value * factor) / factor;\r\n\r\n\t\t\t\t\t// If tickDecimals was specified, ensure that we have exactly that\r\n\t\t\t\t\t// much precision; otherwise default to the value's own precision.\r\n\r\n\t\t\t\t\tif (axis.tickDecimals != null) {\r\n\t\t\t\t\t\tvar decimal = formatted.indexOf(\".\");\r\n\t\t\t\t\t\tvar precision = decimal == -1 ? 0 : formatted.length - decimal - 1;\r\n\t\t\t\t\t\tif (precision < axis.tickDecimals) {\r\n\t\t\t\t\t\t\treturn (precision ? 
formatted : formatted + \".\") + (\"\" + factor).substr(1, axis.tickDecimals - precision);\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\r\n                    return formatted;\r\n                };\r\n            }\r\n\r\n            if ($.isFunction(opts.tickFormatter))\r\n                axis.tickFormatter = function (v, axis) { return \"\" + opts.tickFormatter(v, axis); };\r\n\r\n            if (opts.alignTicksWithAxis != null) {\r\n                var otherAxis = (axis.direction == \"x\" ? xaxes : yaxes)[opts.alignTicksWithAxis - 1];\r\n                if (otherAxis && otherAxis.used && otherAxis != axis) {\r\n                    // consider snapping min/max to outermost nice ticks\r\n                    var niceTicks = axis.tickGenerator(axis);\r\n                    if (niceTicks.length > 0) {\r\n                        if (opts.min == null)\r\n                            axis.min = Math.min(axis.min, niceTicks[0]);\r\n                        if (opts.max == null && niceTicks.length > 1)\r\n                            axis.max = Math.max(axis.max, niceTicks[niceTicks.length - 1]);\r\n                    }\r\n\r\n                    axis.tickGenerator = function (axis) {\r\n                        // copy ticks, scaled to this axis\r\n                        var ticks = [], v, i;\r\n                        for (i = 0; i < otherAxis.ticks.length; ++i) {\r\n                            v = (otherAxis.ticks[i].v - otherAxis.min) / (otherAxis.max - otherAxis.min);\r\n                            v = axis.min + v * (axis.max - axis.min);\r\n                            ticks.push(v);\r\n                        }\r\n                        return ticks;\r\n                    };\r\n\r\n                    // we might need an extra decimal since forced\r\n                    // ticks don't necessarily fit naturally\r\n                    if (!axis.mode && opts.tickDecimals == null) {\r\n                        var extraDec = Math.max(0, -Math.floor(Math.log(axis.delta) / 
Math.LN10) + 1),\r\n                            ts = axis.tickGenerator(axis);\r\n\r\n                        // only proceed if the tick interval rounded\r\n                        // with an extra decimal doesn't give us a\r\n                        // zero at end\r\n                        if (!(ts.length > 1 && /\\..*0$/.test((ts[1] - ts[0]).toFixed(extraDec))))\r\n                            axis.tickDecimals = extraDec;\r\n                    }\r\n                }\r\n            }\r\n        }\r\n\r\n        function setTicks(axis) {\r\n            var oticks = axis.options.ticks, ticks = [];\r\n            if (oticks == null || (typeof oticks == \"number\" && oticks > 0))\r\n                ticks = axis.tickGenerator(axis);\r\n            else if (oticks) {\r\n                if ($.isFunction(oticks))\r\n                    // generate the ticks\r\n                    ticks = oticks(axis);\r\n                else\r\n                    ticks = oticks;\r\n            }\r\n\r\n            // clean up/labelify the supplied ticks, copy them over\r\n            var i, v;\r\n            axis.ticks = [];\r\n            for (i = 0; i < ticks.length; ++i) {\r\n                var label = null;\r\n                var t = ticks[i];\r\n                if (typeof t == \"object\") {\r\n                    v = +t[0];\r\n                    if (t.length > 1)\r\n                        label = t[1];\r\n                }\r\n                else\r\n                    v = +t;\r\n                if (label == null)\r\n                    label = axis.tickFormatter(v, axis);\r\n                if (!isNaN(v))\r\n                    axis.ticks.push({ v: v, label: label });\r\n            }\r\n        }\r\n\r\n        function snapRangeToTicks(axis, ticks) {\r\n            if (axis.options.autoscaleMargin && ticks.length > 0) {\r\n                // snap to ticks\r\n                if (axis.options.min == null)\r\n                    axis.min = Math.min(axis.min, ticks[0].v);\r\n   
             if (axis.options.max == null && ticks.length > 1)\r\n                    axis.max = Math.max(axis.max, ticks[ticks.length - 1].v);\r\n            }\r\n        }\r\n\r\n        function draw() {\r\n\r\n            surface.clear();\r\n\r\n            executeHooks(hooks.drawBackground, [ctx]);\r\n\r\n            var grid = options.grid;\r\n\r\n            // draw background, if any\r\n            if (grid.show && grid.backgroundColor)\r\n                drawBackground();\r\n\r\n            if (grid.show && !grid.aboveData) {\r\n                drawGrid();\r\n            }\r\n\r\n            for (var i = 0; i < series.length; ++i) {\r\n                executeHooks(hooks.drawSeries, [ctx, series[i]]);\r\n                drawSeries(series[i]);\r\n            }\r\n\r\n            executeHooks(hooks.draw, [ctx]);\r\n\r\n            if (grid.show && grid.aboveData) {\r\n                drawGrid();\r\n            }\r\n\r\n            surface.render();\r\n        }\r\n\r\n        function extractRange(ranges, coord) {\r\n            var axis, from, to, key, axes = allAxes();\r\n\r\n            for (var i = 0; i < axes.length; ++i) {\r\n                axis = axes[i];\r\n                if (axis.direction == coord) {\r\n                    key = coord + axis.n + \"axis\";\r\n                    if (!ranges[key] && axis.n == 1)\r\n                        key = coord + \"axis\"; // support x1axis as xaxis\r\n                    if (ranges[key]) {\r\n                        from = ranges[key].from;\r\n                        to = ranges[key].to;\r\n                        break;\r\n                    }\r\n                }\r\n            }\r\n\r\n            // backwards-compat stuff - to be removed in future\r\n            if (!ranges[key]) {\r\n                axis = coord == \"x\" ? 
xaxes[0] : yaxes[0];\r\n                from = ranges[coord + \"1\"];\r\n                to = ranges[coord + \"2\"];\r\n            }\r\n\r\n            // auto-reverse as an added bonus\r\n            if (from != null && to != null && from > to) {\r\n                var tmp = from;\r\n                from = to;\r\n                to = tmp;\r\n            }\r\n\r\n            return { from: from, to: to, axis: axis };\r\n        }\r\n\r\n        function drawBackground() {\r\n            ctx.save();\r\n            ctx.translate(plotOffset.left, plotOffset.top);\r\n\r\n            ctx.fillStyle = getColorOrGradient(options.grid.backgroundColor, plotHeight, 0, \"rgba(255, 255, 255, 0)\");\r\n            ctx.fillRect(0, 0, plotWidth, plotHeight);\r\n            ctx.restore();\r\n        }\r\n\r\n        function drawGrid() {\r\n            var i, axes, bw, bc;\r\n\r\n            ctx.save();\r\n            ctx.translate(plotOffset.left, plotOffset.top);\r\n\r\n            // draw markings\r\n            var markings = options.grid.markings;\r\n            if (markings) {\r\n                if ($.isFunction(markings)) {\r\n                    axes = plot.getAxes();\r\n                    // xmin etc. 
is backwards compatibility, to be\r\n                    // removed in the future\r\n                    axes.xmin = axes.xaxis.min;\r\n                    axes.xmax = axes.xaxis.max;\r\n                    axes.ymin = axes.yaxis.min;\r\n                    axes.ymax = axes.yaxis.max;\r\n\r\n                    markings = markings(axes);\r\n                }\r\n\r\n                for (i = 0; i < markings.length; ++i) {\r\n                    var m = markings[i],\r\n                        xrange = extractRange(m, \"x\"),\r\n                        yrange = extractRange(m, \"y\");\r\n\r\n                    // fill in missing\r\n                    if (xrange.from == null)\r\n                        xrange.from = xrange.axis.min;\r\n                    if (xrange.to == null)\r\n                        xrange.to = xrange.axis.max;\r\n                    if (yrange.from == null)\r\n                        yrange.from = yrange.axis.min;\r\n                    if (yrange.to == null)\r\n                        yrange.to = yrange.axis.max;\r\n\r\n                    // clip\r\n                    if (xrange.to < xrange.axis.min || xrange.from > xrange.axis.max ||\r\n                        yrange.to < yrange.axis.min || yrange.from > yrange.axis.max)\r\n                        continue;\r\n\r\n                    xrange.from = Math.max(xrange.from, xrange.axis.min);\r\n                    xrange.to = Math.min(xrange.to, xrange.axis.max);\r\n                    yrange.from = Math.max(yrange.from, yrange.axis.min);\r\n                    yrange.to = Math.min(yrange.to, yrange.axis.max);\r\n\r\n                    if (xrange.from == xrange.to && yrange.from == yrange.to)\r\n                        continue;\r\n\r\n                    // then draw\r\n                    xrange.from = xrange.axis.p2c(xrange.from);\r\n                    xrange.to = xrange.axis.p2c(xrange.to);\r\n                    yrange.from = yrange.axis.p2c(yrange.from);\r\n                    yrange.to = 
yrange.axis.p2c(yrange.to);\r\n\r\n                    if (xrange.from == xrange.to || yrange.from == yrange.to) {\r\n                        // draw line\r\n                        ctx.beginPath();\r\n                        ctx.strokeStyle = m.color || options.grid.markingsColor;\r\n                        ctx.lineWidth = m.lineWidth || options.grid.markingsLineWidth;\r\n                        ctx.moveTo(xrange.from, yrange.from);\r\n                        ctx.lineTo(xrange.to, yrange.to);\r\n                        ctx.stroke();\r\n                    }\r\n                    else {\r\n                        // fill area\r\n                        ctx.fillStyle = m.color || options.grid.markingsColor;\r\n                        ctx.fillRect(xrange.from, yrange.to,\r\n                                     xrange.to - xrange.from,\r\n                                     yrange.from - yrange.to);\r\n                    }\r\n                }\r\n            }\r\n\r\n            // draw the ticks\r\n            axes = allAxes();\r\n            bw = options.grid.borderWidth;\r\n\r\n            for (var j = 0; j < axes.length; ++j) {\r\n                var axis = axes[j], box = axis.box,\r\n                    t = axis.tickLength, x, y, xoff, yoff;\r\n                if (!axis.show || axis.ticks.length == 0)\r\n                    continue;\r\n\r\n                ctx.lineWidth = 1;\r\n\r\n                // find the edges\r\n                if (axis.direction == \"x\") {\r\n                    x = 0;\r\n                    if (t == \"full\")\r\n                        y = (axis.position == \"top\" ? 0 : plotHeight);\r\n                    else\r\n                        y = box.top - plotOffset.top + (axis.position == \"top\" ? box.height : 0);\r\n                }\r\n                else {\r\n                    y = 0;\r\n                    if (t == \"full\")\r\n                        x = (axis.position == \"left\" ? 
0 : plotWidth);\r\n                    else\r\n                        x = box.left - plotOffset.left + (axis.position == \"left\" ? box.width : 0);\r\n                }\r\n\r\n                // draw tick bar\r\n                if (!axis.innermost) {\r\n                    ctx.strokeStyle = axis.options.color;\r\n                    ctx.beginPath();\r\n                    xoff = yoff = 0;\r\n                    if (axis.direction == \"x\")\r\n                        xoff = plotWidth + 1;\r\n                    else\r\n                        yoff = plotHeight + 1;\r\n\r\n                    if (ctx.lineWidth == 1) {\r\n                        if (axis.direction == \"x\") {\r\n                            y = Math.floor(y) + 0.5;\r\n                        } else {\r\n                            x = Math.floor(x) + 0.5;\r\n                        }\r\n                    }\r\n\r\n                    ctx.moveTo(x, y);\r\n                    ctx.lineTo(x + xoff, y + yoff);\r\n                    ctx.stroke();\r\n                }\r\n\r\n                // draw ticks\r\n\r\n                ctx.strokeStyle = axis.options.tickColor;\r\n\r\n                ctx.beginPath();\r\n                for (i = 0; i < axis.ticks.length; ++i) {\r\n                    var v = axis.ticks[i].v;\r\n\r\n                    xoff = yoff = 0;\r\n\r\n                    if (isNaN(v) || v < axis.min || v > axis.max\r\n                        // skip those lying on the axes if we got a border\r\n                        || (t == \"full\"\r\n                            && ((typeof bw == \"object\" && bw[axis.position] > 0) || bw > 0)\r\n                            && (v == axis.min || v == axis.max)))\r\n                        continue;\r\n\r\n                    if (axis.direction == \"x\") {\r\n                        x = axis.p2c(v);\r\n                        yoff = t == \"full\" ? 
-plotHeight : t;\r\n\r\n                        if (axis.position == \"top\")\r\n                            yoff = -yoff;\r\n                    }\r\n                    else {\r\n                        y = axis.p2c(v);\r\n                        xoff = t == \"full\" ? -plotWidth : t;\r\n\r\n                        if (axis.position == \"left\")\r\n                            xoff = -xoff;\r\n                    }\r\n\r\n                    if (ctx.lineWidth == 1) {\r\n                        if (axis.direction == \"x\")\r\n                            x = Math.floor(x) + 0.5;\r\n                        else\r\n                            y = Math.floor(y) + 0.5;\r\n                    }\r\n\r\n                    ctx.moveTo(x, y);\r\n                    ctx.lineTo(x + xoff, y + yoff);\r\n                }\r\n\r\n                ctx.stroke();\r\n            }\r\n\r\n\r\n            // draw border\r\n            if (bw) {\r\n                // If either borderWidth or borderColor is an object, then draw the border\r\n                // line by line instead of as one rectangle\r\n                bc = options.grid.borderColor;\r\n                if(typeof bw == \"object\" || typeof bc == \"object\") {\r\n                    if (typeof bw !== \"object\") {\r\n                        bw = {top: bw, right: bw, bottom: bw, left: bw};\r\n                    }\r\n                    if (typeof bc !== \"object\") {\r\n                        bc = {top: bc, right: bc, bottom: bc, left: bc};\r\n                    }\r\n\r\n                    if (bw.top > 0) {\r\n                        ctx.strokeStyle = bc.top;\r\n                        ctx.lineWidth = bw.top;\r\n                        ctx.beginPath();\r\n                        ctx.moveTo(0 - bw.left, 0 - bw.top/2);\r\n                        ctx.lineTo(plotWidth, 0 - bw.top/2);\r\n                        ctx.stroke();\r\n                    }\r\n\r\n                    if (bw.right > 0) {\r\n                        
ctx.strokeStyle = bc.right;\r\n                        ctx.lineWidth = bw.right;\r\n                        ctx.beginPath();\r\n                        ctx.moveTo(plotWidth + bw.right / 2, 0 - bw.top);\r\n                        ctx.lineTo(plotWidth + bw.right / 2, plotHeight);\r\n                        ctx.stroke();\r\n                    }\r\n\r\n                    if (bw.bottom > 0) {\r\n                        ctx.strokeStyle = bc.bottom;\r\n                        ctx.lineWidth = bw.bottom;\r\n                        ctx.beginPath();\r\n                        ctx.moveTo(plotWidth + bw.right, plotHeight + bw.bottom / 2);\r\n                        ctx.lineTo(0, plotHeight + bw.bottom / 2);\r\n                        ctx.stroke();\r\n                    }\r\n\r\n                    if (bw.left > 0) {\r\n                        ctx.strokeStyle = bc.left;\r\n                        ctx.lineWidth = bw.left;\r\n                        ctx.beginPath();\r\n                        ctx.moveTo(0 - bw.left/2, plotHeight + bw.bottom);\r\n                        ctx.lineTo(0- bw.left/2, 0);\r\n                        ctx.stroke();\r\n                    }\r\n                }\r\n                else {\r\n                    ctx.lineWidth = bw;\r\n                    ctx.strokeStyle = options.grid.borderColor;\r\n                    ctx.strokeRect(-bw/2, -bw/2, plotWidth + bw, plotHeight + bw);\r\n                }\r\n            }\r\n\r\n            ctx.restore();\r\n        }\r\n\r\n        function drawAxisLabels() {\r\n\r\n            $.each(allAxes(), function (_, axis) {\r\n                if (!axis.show || axis.ticks.length == 0)\r\n                    return;\r\n\r\n                var box = axis.box,\r\n                    legacyStyles = axis.direction + \"Axis \" + axis.direction + axis.n + \"Axis\",\r\n                    layer = \"flot-\" + axis.direction + \"-axis flot-\" + axis.direction + axis.n + \"-axis \" + legacyStyles,\r\n                    font = 
axis.options.font || \"flot-tick-label tickLabel\",\r\n                    tick, x, y, halign, valign;\r\n\r\n                surface.removeText(layer);\r\n\r\n                for (var i = 0; i < axis.ticks.length; ++i) {\r\n\r\n                    tick = axis.ticks[i];\r\n                    if (!tick.label || tick.v < axis.min || tick.v > axis.max)\r\n                        continue;\r\n\r\n                    if (axis.direction == \"x\") {\r\n                        halign = \"center\";\r\n                        x = plotOffset.left + axis.p2c(tick.v);\r\n                        if (axis.position == \"bottom\") {\r\n                            y = box.top + box.padding;\r\n                        } else {\r\n                            y = box.top + box.height - box.padding;\r\n                            valign = \"bottom\";\r\n                        }\r\n                    } else {\r\n                        valign = \"middle\";\r\n                        y = plotOffset.top + axis.p2c(tick.v);\r\n                        if (axis.position == \"left\") {\r\n                            x = box.left + box.width - box.padding;\r\n                            halign = \"right\";\r\n                        } else {\r\n                            x = box.left + box.padding;\r\n                        }\r\n                    }\r\n\r\n                    surface.addText(layer, x, y, tick.label, font, null, halign, valign);\r\n                }\r\n            });\r\n        }\r\n\r\n        function drawSeries(series) {\r\n            if (series.lines.show)\r\n                drawSeriesLines(series);\r\n            if (series.bars.show)\r\n                drawSeriesBars(series);\r\n            if (series.points.show)\r\n                drawSeriesPoints(series);\r\n        }\r\n\r\n        function drawSeriesLines(series) {\r\n            function plotLine(datapoints, xoffset, yoffset, axisx, axisy) {\r\n                var points = datapoints.points,\r\n               
     ps = datapoints.pointsize,\r\n                    prevx = null, prevy = null;\r\n\r\n                ctx.beginPath();\r\n                for (var i = ps; i < points.length; i += ps) {\r\n                    var x1 = points[i - ps], y1 = points[i - ps + 1],\r\n                        x2 = points[i], y2 = points[i + 1];\r\n\r\n                    if (x1 == null || x2 == null)\r\n                        continue;\r\n\r\n                    // clip with ymin\r\n                    if (y1 <= y2 && y1 < axisy.min) {\r\n                        if (y2 < axisy.min)\r\n                            continue;   // line segment is outside\r\n                        // compute new intersection point\r\n                        x1 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1;\r\n                        y1 = axisy.min;\r\n                    }\r\n                    else if (y2 <= y1 && y2 < axisy.min) {\r\n                        if (y1 < axisy.min)\r\n                            continue;\r\n                        x2 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1;\r\n                        y2 = axisy.min;\r\n                    }\r\n\r\n                    // clip with ymax\r\n                    if (y1 >= y2 && y1 > axisy.max) {\r\n                        if (y2 > axisy.max)\r\n                            continue;\r\n                        x1 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1;\r\n                        y1 = axisy.max;\r\n                    }\r\n                    else if (y2 >= y1 && y2 > axisy.max) {\r\n                        if (y1 > axisy.max)\r\n                            continue;\r\n                        x2 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1;\r\n                        y2 = axisy.max;\r\n                    }\r\n\r\n                    // clip with xmin\r\n                    if (x1 <= x2 && x1 < axisx.min) {\r\n                        if (x2 < axisx.min)\r\n                            continue;\r\n                        y1 = 
(axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1;\r\n                        x1 = axisx.min;\r\n                    }\r\n                    else if (x2 <= x1 && x2 < axisx.min) {\r\n                        if (x1 < axisx.min)\r\n                            continue;\r\n                        y2 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1;\r\n                        x2 = axisx.min;\r\n                    }\r\n\r\n                    // clip with xmax\r\n                    if (x1 >= x2 && x1 > axisx.max) {\r\n                        if (x2 > axisx.max)\r\n                            continue;\r\n                        y1 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1;\r\n                        x1 = axisx.max;\r\n                    }\r\n                    else if (x2 >= x1 && x2 > axisx.max) {\r\n                        if (x1 > axisx.max)\r\n                            continue;\r\n                        y2 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1;\r\n                        x2 = axisx.max;\r\n                    }\r\n\r\n                    if (x1 != prevx || y1 != prevy)\r\n                        ctx.moveTo(axisx.p2c(x1) + xoffset, axisy.p2c(y1) + yoffset);\r\n\r\n                    prevx = x2;\r\n                    prevy = y2;\r\n                    ctx.lineTo(axisx.p2c(x2) + xoffset, axisy.p2c(y2) + yoffset);\r\n                }\r\n                ctx.stroke();\r\n            }\r\n\r\n            function plotLineArea(datapoints, axisx, axisy) {\r\n                var points = datapoints.points,\r\n                    ps = datapoints.pointsize,\r\n                    bottom = Math.min(Math.max(0, axisy.min), axisy.max),\r\n                    i = 0, top, areaOpen = false,\r\n                    ypos = 1, segmentStart = 0, segmentEnd = 0;\r\n\r\n                // we process each segment in two turns, first forward\r\n                // direction to sketch out top, then once we hit the\r\n                // end we go backwards to sketch the 
bottom\r\n                while (true) {\r\n                    if (ps > 0 && i > points.length + ps)\r\n                        break;\r\n\r\n                    i += ps; // ps is negative if going backwards\r\n\r\n                    var x1 = points[i - ps],\r\n                        y1 = points[i - ps + ypos],\r\n                        x2 = points[i], y2 = points[i + ypos];\r\n\r\n                    if (areaOpen) {\r\n                        if (ps > 0 && x1 != null && x2 == null) {\r\n                            // at turning point\r\n                            segmentEnd = i;\r\n                            ps = -ps;\r\n                            ypos = 2;\r\n                            continue;\r\n                        }\r\n\r\n                        if (ps < 0 && i == segmentStart + ps) {\r\n                            // done with the reverse sweep\r\n                            ctx.fill();\r\n                            areaOpen = false;\r\n                            ps = -ps;\r\n                            ypos = 1;\r\n                            i = segmentStart = segmentEnd + ps;\r\n                            continue;\r\n                        }\r\n                    }\r\n\r\n                    if (x1 == null || x2 == null)\r\n                        continue;\r\n\r\n                    // clip x values\r\n\r\n                    // clip with xmin\r\n                    if (x1 <= x2 && x1 < axisx.min) {\r\n                        if (x2 < axisx.min)\r\n                            continue;\r\n                        y1 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1;\r\n                        x1 = axisx.min;\r\n                    }\r\n                    else if (x2 <= x1 && x2 < axisx.min) {\r\n                        if (x1 < axisx.min)\r\n                            continue;\r\n                        y2 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1;\r\n                        x2 = axisx.min;\r\n                    }\r\n\r\n        
            // clip with xmax\r\n                    if (x1 >= x2 && x1 > axisx.max) {\r\n                        if (x2 > axisx.max)\r\n                            continue;\r\n                        y1 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1;\r\n                        x1 = axisx.max;\r\n                    }\r\n                    else if (x2 >= x1 && x2 > axisx.max) {\r\n                        if (x1 > axisx.max)\r\n                            continue;\r\n                        y2 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1;\r\n                        x2 = axisx.max;\r\n                    }\r\n\r\n                    if (!areaOpen) {\r\n                        // open area\r\n                        ctx.beginPath();\r\n                        ctx.moveTo(axisx.p2c(x1), axisy.p2c(bottom));\r\n                        areaOpen = true;\r\n                    }\r\n\r\n                    // now first check the case where both is outside\r\n                    if (y1 >= axisy.max && y2 >= axisy.max) {\r\n                        ctx.lineTo(axisx.p2c(x1), axisy.p2c(axisy.max));\r\n                        ctx.lineTo(axisx.p2c(x2), axisy.p2c(axisy.max));\r\n                        continue;\r\n                    }\r\n                    else if (y1 <= axisy.min && y2 <= axisy.min) {\r\n                        ctx.lineTo(axisx.p2c(x1), axisy.p2c(axisy.min));\r\n                        ctx.lineTo(axisx.p2c(x2), axisy.p2c(axisy.min));\r\n                        continue;\r\n                    }\r\n\r\n                    // else it's a bit more complicated, there might\r\n                    // be a flat maxed out rectangle first, then a\r\n                    // triangular cutout or reverse; to find these\r\n                    // keep track of the current x values\r\n                    var x1old = x1, x2old = x2;\r\n\r\n                    // clip the y values, without shortcutting, we\r\n                    // go through all cases in turn\r\n\r\n       
             // clip with ymin\r\n                    if (y1 <= y2 && y1 < axisy.min && y2 >= axisy.min) {\r\n                        x1 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1;\r\n                        y1 = axisy.min;\r\n                    }\r\n                    else if (y2 <= y1 && y2 < axisy.min && y1 >= axisy.min) {\r\n                        x2 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1;\r\n                        y2 = axisy.min;\r\n                    }\r\n\r\n                    // clip with ymax\r\n                    if (y1 >= y2 && y1 > axisy.max && y2 <= axisy.max) {\r\n                        x1 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1;\r\n                        y1 = axisy.max;\r\n                    }\r\n                    else if (y2 >= y1 && y2 > axisy.max && y1 <= axisy.max) {\r\n                        x2 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1;\r\n                        y2 = axisy.max;\r\n                    }\r\n\r\n                    // if the x value was changed we got a rectangle\r\n                    // to fill\r\n                    if (x1 != x1old) {\r\n                        ctx.lineTo(axisx.p2c(x1old), axisy.p2c(y1));\r\n                        // it goes to (x1, y1), but we fill that below\r\n                    }\r\n\r\n                    // fill triangular section, this sometimes result\r\n                    // in redundant points if (x1, y1) hasn't changed\r\n                    // from previous line to, but we just ignore that\r\n                    ctx.lineTo(axisx.p2c(x1), axisy.p2c(y1));\r\n                    ctx.lineTo(axisx.p2c(x2), axisy.p2c(y2));\r\n\r\n                    // fill the other rectangle if it's there\r\n                    if (x2 != x2old) {\r\n                        ctx.lineTo(axisx.p2c(x2), axisy.p2c(y2));\r\n                        ctx.lineTo(axisx.p2c(x2old), axisy.p2c(y2));\r\n                    }\r\n                }\r\n            }\r\n\r\n            
ctx.save();\r\n            ctx.translate(plotOffset.left, plotOffset.top);\r\n            ctx.lineJoin = \"round\";\r\n\r\n            var lw = series.lines.lineWidth,\r\n                sw = series.shadowSize;\r\n            // FIXME: consider another form of shadow when filling is turned on\r\n            if (lw > 0 && sw > 0) {\r\n                // draw shadow as a thick and thin line with transparency\r\n                ctx.lineWidth = sw;\r\n                ctx.strokeStyle = \"rgba(0,0,0,0.1)\";\r\n                // position shadow at angle from the mid of line\r\n                var angle = Math.PI/18;\r\n                plotLine(series.datapoints, Math.sin(angle) * (lw/2 + sw/2), Math.cos(angle) * (lw/2 + sw/2), series.xaxis, series.yaxis);\r\n                ctx.lineWidth = sw/2;\r\n                plotLine(series.datapoints, Math.sin(angle) * (lw/2 + sw/4), Math.cos(angle) * (lw/2 + sw/4), series.xaxis, series.yaxis);\r\n            }\r\n\r\n            ctx.lineWidth = lw;\r\n            ctx.strokeStyle = series.color;\r\n            var fillStyle = getFillStyle(series.lines, series.color, 0, plotHeight);\r\n            if (fillStyle) {\r\n                ctx.fillStyle = fillStyle;\r\n                plotLineArea(series.datapoints, series.xaxis, series.yaxis);\r\n            }\r\n\r\n            if (lw > 0)\r\n                plotLine(series.datapoints, 0, 0, series.xaxis, series.yaxis);\r\n            ctx.restore();\r\n        }\r\n\r\n        function drawSeriesPoints(series) {\r\n            function plotPoints(datapoints, radius, fillStyle, offset, shadow, axisx, axisy, symbol) {\r\n                var points = datapoints.points, ps = datapoints.pointsize;\r\n\r\n                for (var i = 0; i < points.length; i += ps) {\r\n                    var x = points[i], y = points[i + 1];\r\n                    if (x == null || x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max)\r\n                        continue;\r\n\r\n                    
ctx.beginPath();\r\n                    x = axisx.p2c(x);\r\n                    y = axisy.p2c(y) + offset;\r\n                    if (symbol == \"circle\")\r\n                        ctx.arc(x, y, radius, 0, shadow ? Math.PI : Math.PI * 2, false);\r\n                    else\r\n                        symbol(ctx, x, y, radius, shadow);\r\n                    ctx.closePath();\r\n\r\n                    if (fillStyle) {\r\n                        ctx.fillStyle = fillStyle;\r\n                        ctx.fill();\r\n                    }\r\n                    ctx.stroke();\r\n                }\r\n            }\r\n\r\n            ctx.save();\r\n            ctx.translate(plotOffset.left, plotOffset.top);\r\n\r\n            var lw = series.points.lineWidth,\r\n                sw = series.shadowSize,\r\n                radius = series.points.radius,\r\n                symbol = series.points.symbol;\r\n\r\n            // If the user sets the line width to 0, we change it to a very \r\n            // small value. 
A line width of 0 seems to force the default of 1.\r\n            // Doing the conditional here allows the shadow setting to still be \r\n            // optional even with a lineWidth of 0.\r\n\r\n            if( lw == 0 )\r\n                lw = 0.0001;\r\n\r\n            if (lw > 0 && sw > 0) {\r\n                // draw shadow in two steps\r\n                var w = sw / 2;\r\n                ctx.lineWidth = w;\r\n                ctx.strokeStyle = \"rgba(0,0,0,0.1)\";\r\n                plotPoints(series.datapoints, radius, null, w + w/2, true,\r\n                           series.xaxis, series.yaxis, symbol);\r\n\r\n                ctx.strokeStyle = \"rgba(0,0,0,0.2)\";\r\n                plotPoints(series.datapoints, radius, null, w/2, true,\r\n                           series.xaxis, series.yaxis, symbol);\r\n            }\r\n\r\n            ctx.lineWidth = lw;\r\n            ctx.strokeStyle = series.color;\r\n            plotPoints(series.datapoints, radius,\r\n                       getFillStyle(series.points, series.color), 0, false,\r\n                       series.xaxis, series.yaxis, symbol);\r\n            ctx.restore();\r\n        }\r\n\r\n        function drawBar(x, y, b, barLeft, barRight, offset, fillStyleCallback, axisx, axisy, c, horizontal, lineWidth) {\r\n            var left, right, bottom, top,\r\n                drawLeft, drawRight, drawTop, drawBottom,\r\n                tmp;\r\n\r\n            // in horizontal mode, we start the bar from the left\r\n            // instead of from the bottom so it appears to be\r\n            // horizontal rather than vertical\r\n            if (horizontal) {\r\n                drawBottom = drawRight = drawTop = true;\r\n                drawLeft = false;\r\n                left = b;\r\n                right = x;\r\n                top = y + barLeft;\r\n                bottom = y + barRight;\r\n\r\n                // account for negative bars\r\n                if (right < left) {\r\n                    tmp 
= right;\r\n                    right = left;\r\n                    left = tmp;\r\n                    drawLeft = true;\r\n                    drawRight = false;\r\n                }\r\n            }\r\n            else {\r\n                drawLeft = drawRight = drawTop = true;\r\n                drawBottom = false;\r\n                left = x + barLeft;\r\n                right = x + barRight;\r\n                bottom = b;\r\n                top = y;\r\n\r\n                // account for negative bars\r\n                if (top < bottom) {\r\n                    tmp = top;\r\n                    top = bottom;\r\n                    bottom = tmp;\r\n                    drawBottom = true;\r\n                    drawTop = false;\r\n                }\r\n            }\r\n\r\n            // clip\r\n            if (right < axisx.min || left > axisx.max ||\r\n                top < axisy.min || bottom > axisy.max)\r\n                return;\r\n\r\n            if (left < axisx.min) {\r\n                left = axisx.min;\r\n                drawLeft = false;\r\n            }\r\n\r\n            if (right > axisx.max) {\r\n                right = axisx.max;\r\n                drawRight = false;\r\n            }\r\n\r\n            if (bottom < axisy.min) {\r\n                bottom = axisy.min;\r\n                drawBottom = false;\r\n            }\r\n\r\n            if (top > axisy.max) {\r\n                top = axisy.max;\r\n                drawTop = false;\r\n            }\r\n\r\n            left = axisx.p2c(left);\r\n            bottom = axisy.p2c(bottom);\r\n            right = axisx.p2c(right);\r\n            top = axisy.p2c(top);\r\n\r\n            // fill the bar\r\n            if (fillStyleCallback) {\r\n                c.beginPath();\r\n                c.moveTo(left, bottom);\r\n                c.lineTo(left, top);\r\n                c.lineTo(right, top);\r\n                c.lineTo(right, bottom);\r\n                c.fillStyle = fillStyleCallback(bottom, 
top);\r\n                c.fill();\r\n            }\r\n\r\n            // draw outline\r\n            if (lineWidth > 0 && (drawLeft || drawRight || drawTop || drawBottom)) {\r\n                c.beginPath();\r\n\r\n                // FIXME: inline moveTo is buggy with excanvas\r\n                c.moveTo(left, bottom + offset);\r\n                if (drawLeft)\r\n                    c.lineTo(left, top + offset);\r\n                else\r\n                    c.moveTo(left, top + offset);\r\n                if (drawTop)\r\n                    c.lineTo(right, top + offset);\r\n                else\r\n                    c.moveTo(right, top + offset);\r\n                if (drawRight)\r\n                    c.lineTo(right, bottom + offset);\r\n                else\r\n                    c.moveTo(right, bottom + offset);\r\n                if (drawBottom)\r\n                    c.lineTo(left, bottom + offset);\r\n                else\r\n                    c.moveTo(left, bottom + offset);\r\n                c.stroke();\r\n            }\r\n        }\r\n\r\n        function drawSeriesBars(series) {\r\n            function plotBars(datapoints, barLeft, barRight, offset, fillStyleCallback, axisx, axisy) {\r\n                var points = datapoints.points, ps = datapoints.pointsize;\r\n\r\n                for (var i = 0; i < points.length; i += ps) {\r\n                    if (points[i] == null)\r\n                        continue;\r\n                    drawBar(points[i], points[i + 1], points[i + 2], barLeft, barRight, offset, fillStyleCallback, axisx, axisy, ctx, series.bars.horizontal, series.bars.lineWidth);\r\n                }\r\n            }\r\n\r\n            ctx.save();\r\n            ctx.translate(plotOffset.left, plotOffset.top);\r\n\r\n            // FIXME: figure out a way to add shadows (for instance along the right edge)\r\n            ctx.lineWidth = series.bars.lineWidth;\r\n            ctx.strokeStyle = series.color;\r\n\r\n            var 
barLeft;\r\n\r\n            switch (series.bars.align) {\r\n                case \"left\":\r\n                    barLeft = 0;\r\n                    break;\r\n                case \"right\":\r\n                    barLeft = -series.bars.barWidth;\r\n                    break;\r\n                case \"center\":\r\n                    barLeft = -series.bars.barWidth / 2;\r\n                    break;\r\n                default:\r\n                    throw new Error(\"Invalid bar alignment: \" + series.bars.align);\r\n            }\r\n\r\n            var fillStyleCallback = series.bars.fill ? function (bottom, top) { return getFillStyle(series.bars, series.color, bottom, top); } : null;\r\n            plotBars(series.datapoints, barLeft, barLeft + series.bars.barWidth, 0, fillStyleCallback, series.xaxis, series.yaxis);\r\n            ctx.restore();\r\n        }\r\n\r\n        function getFillStyle(filloptions, seriesColor, bottom, top) {\r\n            var fill = filloptions.fill;\r\n            if (!fill)\r\n                return null;\r\n\r\n            if (filloptions.fillColor)\r\n                return getColorOrGradient(filloptions.fillColor, bottom, top, seriesColor);\r\n\r\n            var c = $.color.parse(seriesColor);\r\n            c.a = typeof fill == \"number\" ? fill : 0.4;\r\n            c.normalize();\r\n            return c.toString();\r\n        }\r\n\r\n        function insertLegend() {\r\n\r\n            placeholder.find(\".legend\").remove();\r\n\r\n            if (!options.legend.show)\r\n                return;\r\n\r\n            var fragments = [], entries = [], rowStarted = false,\r\n                lf = options.legend.labelFormatter, s, label;\r\n\r\n            // Build a list of legend entries, with each having a label and a color\r\n\r\n            for (var i = 0; i < series.length; ++i) {\r\n                s = series[i];\r\n                if (s.label) {\r\n                    label = lf ? 
lf(s.label, s) : s.label;\r\n                    if (label) {\r\n                        entries.push({\r\n                            label: label,\r\n                            color: s.color\r\n                        });\r\n                    }\r\n                }\r\n            }\r\n\r\n            // Sort the legend using either the default or a custom comparator\r\n\r\n            if (options.legend.sorted) {\r\n                if ($.isFunction(options.legend.sorted)) {\r\n                    entries.sort(options.legend.sorted);\r\n                } else if (options.legend.sorted == \"reverse\") {\r\n                \tentries.reverse();\r\n                } else {\r\n                    var ascending = options.legend.sorted != \"descending\";\r\n                    entries.sort(function(a, b) {\r\n                        return a.label == b.label ? 0 : (\r\n                            (a.label < b.label) != ascending ? 1 : -1   // Logical XOR\r\n                        );\r\n                    });\r\n                }\r\n            }\r\n\r\n            // Generate markup for the list of entries, in their final order\r\n\r\n            for (var i = 0; i < entries.length; ++i) {\r\n\r\n                var entry = entries[i];\r\n\r\n                if (i % options.legend.noColumns == 0) {\r\n                    if (rowStarted)\r\n                        fragments.push('</tr>');\r\n                    fragments.push('<tr>');\r\n                    rowStarted = true;\r\n                }\r\n\r\n                fragments.push(\r\n                    '<td class=\"legendColorBox\"><div style=\"border:1px solid ' + options.legend.labelBoxBorderColor + ';padding:1px\"><div style=\"width:4px;height:0;border:5px solid ' + entry.color + ';overflow:hidden\"></div></div></td>' +\r\n                    '<td class=\"legendLabel\">' + entry.label + '</td>'\r\n                );\r\n            }\r\n\r\n            if (rowStarted)\r\n                
fragments.push('</tr>');\r\n\r\n            if (fragments.length == 0)\r\n                return;\r\n\r\n            var table = '<table style=\"font-size:smaller;color:' + options.grid.color + '\">' + fragments.join(\"\") + '</table>';\r\n            if (options.legend.container != null)\r\n                $(options.legend.container).html(table);\r\n            else {\r\n                var pos = \"\",\r\n                    p = options.legend.position,\r\n                    m = options.legend.margin;\r\n                if (m[0] == null)\r\n                    m = [m, m];\r\n                if (p.charAt(0) == \"n\")\r\n                    pos += 'top:' + (m[1] + plotOffset.top) + 'px;';\r\n                else if (p.charAt(0) == \"s\")\r\n                    pos += 'bottom:' + (m[1] + plotOffset.bottom) + 'px;';\r\n                if (p.charAt(1) == \"e\")\r\n                    pos += 'right:' + (m[0] + plotOffset.right) + 'px;';\r\n                else if (p.charAt(1) == \"w\")\r\n                    pos += 'left:' + (m[0] + plotOffset.left) + 'px;';\r\n                var legend = $('<div class=\"legend\">' + table.replace('style=\"', 'style=\"position:absolute;' + pos +';') + '</div>').appendTo(placeholder);\r\n                if (options.legend.backgroundOpacity != 0.0) {\r\n                    // put in the transparent background\r\n                    // separately to avoid blended labels and\r\n                    // label boxes\r\n                    var c = options.legend.backgroundColor;\r\n                    if (c == null) {\r\n                        c = options.grid.backgroundColor;\r\n                        if (c && typeof c == \"string\")\r\n                            c = $.color.parse(c);\r\n                        else\r\n                            c = $.color.extract(legend, 'background-color');\r\n                        c.a = 1;\r\n                        c = c.toString();\r\n                    }\r\n                    var div = 
legend.children();\r\n                    $('<div style=\"position:absolute;width:' + div.width() + 'px;height:' + div.height() + 'px;' + pos +'background-color:' + c + ';\"> </div>').prependTo(legend).css('opacity', options.legend.backgroundOpacity);\r\n                }\r\n            }\r\n        }\r\n\r\n\r\n        // interactive features\r\n\r\n        var highlights = [],\r\n            redrawTimeout = null;\r\n\r\n        // returns the data item the mouse is over, or null if none is found\r\n        function findNearbyItem(mouseX, mouseY, seriesFilter) {\r\n            var maxDistance = options.grid.mouseActiveRadius,\r\n                smallestDistance = maxDistance * maxDistance + 1,\r\n                item = null, foundPoint = false, i, j, ps;\r\n\r\n            for (i = series.length - 1; i >= 0; --i) {\r\n                if (!seriesFilter(series[i]))\r\n                    continue;\r\n\r\n                var s = series[i],\r\n                    axisx = s.xaxis,\r\n                    axisy = s.yaxis,\r\n                    points = s.datapoints.points,\r\n                    mx = axisx.c2p(mouseX), // precompute some stuff to make the loop faster\r\n                    my = axisy.c2p(mouseY),\r\n                    maxx = maxDistance / axisx.scale,\r\n                    maxy = maxDistance / axisy.scale;\r\n\r\n                ps = s.datapoints.pointsize;\r\n                // with inverse transforms, we can't use the maxx/maxy\r\n                // optimization, sadly\r\n                if (axisx.options.inverseTransform)\r\n                    maxx = Number.MAX_VALUE;\r\n                if (axisy.options.inverseTransform)\r\n                    maxy = Number.MAX_VALUE;\r\n\r\n                if (s.lines.show || s.points.show) {\r\n                    for (j = 0; j < points.length; j += ps) {\r\n                        var x = points[j], y = points[j + 1];\r\n                        if (x == null)\r\n                            continue;\r\n\r\n    
                    // For points and lines, the cursor must be within a\r\n                        // certain distance to the data point\r\n                        if (x - mx > maxx || x - mx < -maxx ||\r\n                            y - my > maxy || y - my < -maxy)\r\n                            continue;\r\n\r\n                        // We have to calculate distances in pixels, not in\r\n                        // data units, because the scales of the axes may be different\r\n                        var dx = Math.abs(axisx.p2c(x) - mouseX),\r\n                            dy = Math.abs(axisy.p2c(y) - mouseY),\r\n                            dist = dx * dx + dy * dy; // we save the sqrt\r\n\r\n                        // use <= to ensure last point takes precedence\r\n                        // (last generally means on top of)\r\n                        if (dist < smallestDistance) {\r\n                            smallestDistance = dist;\r\n                            item = [i, j / ps];\r\n                        }\r\n                    }\r\n                }\r\n\r\n                if (s.bars.show && !item) { // no other point can be nearby\r\n                    var barLeft = s.bars.align == \"left\" ? 
0 : -s.bars.barWidth/2,\r\n                        barRight = barLeft + s.bars.barWidth;\r\n\r\n                    for (j = 0; j < points.length; j += ps) {\r\n                        var x = points[j], y = points[j + 1], b = points[j + 2];\r\n                        if (x == null)\r\n                            continue;\r\n\r\n                        // for a bar graph, the cursor must be inside the bar\r\n                        if (series[i].bars.horizontal ?\r\n                            (mx <= Math.max(b, x) && mx >= Math.min(b, x) &&\r\n                             my >= y + barLeft && my <= y + barRight) :\r\n                            (mx >= x + barLeft && mx <= x + barRight &&\r\n                             my >= Math.min(b, y) && my <= Math.max(b, y)))\r\n                                item = [i, j / ps];\r\n                    }\r\n                }\r\n            }\r\n\r\n            if (item) {\r\n                i = item[0];\r\n                j = item[1];\r\n                ps = series[i].datapoints.pointsize;\r\n\r\n                return { datapoint: series[i].datapoints.points.slice(j * ps, (j + 1) * ps),\r\n                         dataIndex: j,\r\n                         series: series[i],\r\n                         seriesIndex: i };\r\n            }\r\n\r\n            return null;\r\n        }\r\n\r\n        function onMouseMove(e) {\r\n            if (options.grid.hoverable)\r\n                triggerClickHoverEvent(\"plothover\", e,\r\n                                       function (s) { return s[\"hoverable\"] != false; });\r\n        }\r\n\r\n        function onMouseLeave(e) {\r\n            if (options.grid.hoverable)\r\n                triggerClickHoverEvent(\"plothover\", e,\r\n                                       function (s) { return false; });\r\n        }\r\n\r\n        function onClick(e) {\r\n            triggerClickHoverEvent(\"plotclick\", e,\r\n                                   function (s) { return s[\"clickable\"] 
!= false; });\r\n        }\r\n\r\n        // trigger click or hover event (they send the same parameters\r\n        // so we share their code)\r\n        function triggerClickHoverEvent(eventname, event, seriesFilter) {\r\n            var offset = eventHolder.offset(),\r\n                canvasX = event.pageX - offset.left - plotOffset.left,\r\n                canvasY = event.pageY - offset.top - plotOffset.top,\r\n            pos = canvasToAxisCoords({ left: canvasX, top: canvasY });\r\n\r\n            pos.pageX = event.pageX;\r\n            pos.pageY = event.pageY;\r\n\r\n            var item = findNearbyItem(canvasX, canvasY, seriesFilter);\r\n\r\n            if (item) {\r\n                // fill in mouse pos for any listeners out there\r\n                item.pageX = parseInt(item.series.xaxis.p2c(item.datapoint[0]) + offset.left + plotOffset.left, 10);\r\n                item.pageY = parseInt(item.series.yaxis.p2c(item.datapoint[1]) + offset.top + plotOffset.top, 10);\r\n            }\r\n\r\n            if (options.grid.autoHighlight) {\r\n                // clear auto-highlights\r\n                for (var i = 0; i < highlights.length; ++i) {\r\n                    var h = highlights[i];\r\n                    if (h.auto == eventname &&\r\n                        !(item && h.series == item.series &&\r\n                          h.point[0] == item.datapoint[0] &&\r\n                          h.point[1] == item.datapoint[1]))\r\n                        unhighlight(h.series, h.point);\r\n                }\r\n\r\n                if (item)\r\n                    highlight(item.series, item.datapoint, eventname);\r\n            }\r\n\r\n            placeholder.trigger(eventname, [ pos, item ]);\r\n        }\r\n\r\n        function triggerRedrawOverlay() {\r\n            var t = options.interaction.redrawOverlayInterval;\r\n            if (t == -1) {      // skip event queue\r\n                drawOverlay();\r\n                return;\r\n            }\r\n\r\n       
     if (!redrawTimeout)\r\n                redrawTimeout = setTimeout(drawOverlay, t);\r\n        }\r\n\r\n        function drawOverlay() {\r\n            redrawTimeout = null;\r\n\r\n            // draw highlights\r\n            octx.save();\r\n            overlay.clear();\r\n            octx.translate(plotOffset.left, plotOffset.top);\r\n\r\n            var i, hi;\r\n            for (i = 0; i < highlights.length; ++i) {\r\n                hi = highlights[i];\r\n\r\n                if (hi.series.bars.show)\r\n                    drawBarHighlight(hi.series, hi.point);\r\n                else\r\n                    drawPointHighlight(hi.series, hi.point);\r\n            }\r\n            octx.restore();\r\n\r\n            executeHooks(hooks.drawOverlay, [octx]);\r\n        }\r\n\r\n        function highlight(s, point, auto) {\r\n            if (typeof s == \"number\")\r\n                s = series[s];\r\n\r\n            if (typeof point == \"number\") {\r\n                var ps = s.datapoints.pointsize;\r\n                point = s.datapoints.points.slice(ps * point, ps * (point + 1));\r\n            }\r\n\r\n            var i = indexOfHighlight(s, point);\r\n            if (i == -1) {\r\n                highlights.push({ series: s, point: point, auto: auto });\r\n\r\n                triggerRedrawOverlay();\r\n            }\r\n            else if (!auto)\r\n                highlights[i].auto = false;\r\n        }\r\n\r\n        function unhighlight(s, point) {\r\n            if (s == null && point == null) {\r\n                highlights = [];\r\n                triggerRedrawOverlay();\r\n                return;\r\n            }\r\n\r\n            if (typeof s == \"number\")\r\n                s = series[s];\r\n\r\n            if (typeof point == \"number\") {\r\n                var ps = s.datapoints.pointsize;\r\n                point = s.datapoints.points.slice(ps * point, ps * (point + 1));\r\n            }\r\n\r\n            var i = indexOfHighlight(s, 
point);\r\n            if (i != -1) {\r\n                highlights.splice(i, 1);\r\n\r\n                triggerRedrawOverlay();\r\n            }\r\n        }\r\n\r\n        function indexOfHighlight(s, p) {\r\n            for (var i = 0; i < highlights.length; ++i) {\r\n                var h = highlights[i];\r\n                if (h.series == s && h.point[0] == p[0]\r\n                    && h.point[1] == p[1])\r\n                    return i;\r\n            }\r\n            return -1;\r\n        }\r\n\r\n        function drawPointHighlight(series, point) {\r\n            var x = point[0], y = point[1],\r\n                axisx = series.xaxis, axisy = series.yaxis,\r\n                highlightColor = (typeof series.highlightColor === \"string\") ? series.highlightColor : $.color.parse(series.color).scale('a', 0.5).toString();\r\n\r\n            if (x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max)\r\n                return;\r\n\r\n            var pointRadius = series.points.radius + series.points.lineWidth / 2;\r\n            octx.lineWidth = pointRadius;\r\n            octx.strokeStyle = highlightColor;\r\n            var radius = 1.5 * pointRadius;\r\n            x = axisx.p2c(x);\r\n            y = axisy.p2c(y);\r\n\r\n            octx.beginPath();\r\n            if (series.points.symbol == \"circle\")\r\n                octx.arc(x, y, radius, 0, 2 * Math.PI, false);\r\n            else\r\n                series.points.symbol(octx, x, y, radius, false);\r\n            octx.closePath();\r\n            octx.stroke();\r\n        }\r\n\r\n        function drawBarHighlight(series, point) {\r\n            var highlightColor = (typeof series.highlightColor === \"string\") ? series.highlightColor : $.color.parse(series.color).scale('a', 0.5).toString(),\r\n                fillStyle = highlightColor,\r\n                barLeft = series.bars.align == \"left\" ? 
0 : -series.bars.barWidth/2;\r\n\r\n            octx.lineWidth = series.bars.lineWidth;\r\n            octx.strokeStyle = highlightColor;\r\n\r\n            drawBar(point[0], point[1], point[2] || 0, barLeft, barLeft + series.bars.barWidth,\r\n                    0, function () { return fillStyle; }, series.xaxis, series.yaxis, octx, series.bars.horizontal, series.bars.lineWidth);\r\n        }\r\n\r\n        function getColorOrGradient(spec, bottom, top, defaultColor) {\r\n            if (typeof spec == \"string\")\r\n                return spec;\r\n            else {\r\n                // assume this is a gradient spec; IE currently only\r\n                // supports a simple vertical gradient properly, so that's\r\n                // what we support too\r\n                var gradient = ctx.createLinearGradient(0, top, 0, bottom);\r\n\r\n                for (var i = 0, l = spec.colors.length; i < l; ++i) {\r\n                    var c = spec.colors[i];\r\n                    if (typeof c != \"string\") {\r\n                        var co = $.color.parse(defaultColor);\r\n                        if (c.brightness != null)\r\n                            co = co.scale('rgb', c.brightness);\r\n                        if (c.opacity != null)\r\n                            co.a *= c.opacity;\r\n                        c = co.toString();\r\n                    }\r\n                    gradient.addColorStop(i / (l - 1), c);\r\n                }\r\n\r\n                return gradient;\r\n            }\r\n        }\r\n    }\r\n\r\n    // Add the plot function to the top level of the jQuery object\r\n\r\n    $.plot = function(placeholder, data, options) {\r\n        //var t0 = new Date();\r\n        var plot = new Plot($(placeholder), data, options, $.plot.plugins);\r\n        //(window.console ? 
console.log : alert)(\"time used (msecs): \" + ((new Date()).getTime() - t0.getTime()));\r\n        return plot;\r\n    };\r\n\r\n    $.plot.version = \"0.8.0\";\r\n\r\n    $.plot.plugins = [];\r\n\r\n    // Also add the plot function as a chainable property\r\n\r\n    $.fn.plot = function(data, options) {\r\n        return this.each(function() {\r\n            $.plot(this, data, options);\r\n        });\r\n    }\r\n\r\n    // round to nearby lower multiple of base\r\n    function floorInBase(n, base) {\r\n        return base * Math.floor(n / base);\r\n    }\r\n\r\n})(jQuery);\r\n"
  },
  {
    "path": "vendor/assets/javascripts/jquery.tablesorter.js",
    "content": "/**!\n* TableSorter (FORK) 2.18.3 - Client-side table sorting with ease!\n* @requires jQuery v1.2.6+\n*\n* Copyright (c) 2007 Christian Bach\n* Examples and docs at: http://tablesorter.com\n* Dual licensed under the MIT and GPL licenses:\n* http://www.opensource.org/licenses/mit-license.php\n* http://www.gnu.org/licenses/gpl.html\n*\n* @type jQuery\n* @name tablesorter (FORK)\n* @cat Plugins/Tablesorter\n* @author Christian Bach/christian.bach@polyester.se\n* @contributor Rob Garrison/https://github.com/Mottie/tablesorter\n*/\n/*jshint browser:true, jquery:true, unused:false, expr: true */\n/*global console:false, alert:false */\n!(function($) {\n\t\"use strict\";\n\t$.extend({\n\t\t/*jshint supernew:true */\n\t\ttablesorter: new function() {\n\n\t\t\tvar ts = this;\n\n\t\t\tts.version = \"2.18.3\";\n\n\t\t\tts.parsers = [];\n\t\t\tts.widgets = [];\n\t\t\tts.defaults = {\n\n\t\t\t\t// *** appearance\n\t\t\t\ttheme            : 'default',  // adds tablesorter-{theme} to the table for styling\n\t\t\t\twidthFixed       : false,      // adds colgroup to fix widths of columns\n\t\t\t\tshowProcessing   : false,      // show an indeterminate timer icon in the header when the table is sorted or filtered.\n\n\t\t\t\theaderTemplate   : '{content}',// header layout template (HTML ok); {content} = innerHTML, {icon} = <i/> (class from cssIcon)\n\t\t\t\tonRenderTemplate : null,       // function(index, template){ return template; }, (template is a string)\n\t\t\t\tonRenderHeader   : null,       // function(index){}, (nothing to return)\n\n\t\t\t\t// *** functionality\n\t\t\t\tcancelSelection  : true,       // prevent text selection in the header\n\t\t\t\ttabIndex         : true,       // add tabindex to header for keyboard accessibility\n\t\t\t\tdateFormat       : 'mmddyyyy', // other options: \"ddmmyyy\" or \"yyyymmdd\"\n\t\t\t\tsortMultiSortKey : 'shiftKey', // key used to select additional columns\n\t\t\t\tsortResetKey     : 'ctrlKey',  // key used to remove 
sorting on a column\n\t\t\t\tusNumberFormat   : true,       // false for German \"1.234.567,89\" or French \"1 234 567,89\"\n\t\t\t\tdelayInit        : false,      // if false, the parsed table contents will not update until the first sort\n\t\t\t\tserverSideSorting: false,      // if true, server-side sorting should be performed because client-side sorting will be disabled, but the ui and events will still be used.\n\n\t\t\t\t// *** sort options\n\t\t\t\theaders          : {},         // set sorter, string, empty, locked order, sortInitialOrder, filter, etc.\n\t\t\t\tignoreCase       : true,       // ignore case while sorting\n\t\t\t\tsortForce        : null,       // column(s) first sorted; always applied\n\t\t\t\tsortList         : [],         // Initial sort order; applied initially; updated when manually sorted\n\t\t\t\tsortAppend       : null,       // column(s) sorted last; always applied\n\t\t\t\tsortStable       : false,      // when sorting two rows with exactly the same content, the original sort order is maintained\n\n\t\t\t\tsortInitialOrder : 'asc',      // sort direction on first click\n\t\t\t\tsortLocaleCompare: false,      // replace equivalent character (accented characters)\n\t\t\t\tsortReset        : false,      // third click on the header will reset column to default - unsorted\n\t\t\t\tsortRestart      : false,      // restart sort to \"sortInitialOrder\" when clicking on previously unsorted columns\n\n\t\t\t\temptyTo          : 'bottom',   // sort empty cell to bottom, top, none, zero\n\t\t\t\tstringTo         : 'max',      // sort strings in numerical column as max, min, top, bottom, zero\n\t\t\t\ttextExtraction   : 'basic',    // text extraction method/function - function(node, table, cellIndex){}\n\t\t\t\ttextAttribute    : 'data-text',// data-attribute that contains alternate cell text (used in textExtraction function)\n\t\t\t\ttextSorter       : null,       // choose overall or specific column sorter function(a, b, direction, table, 
columnIndex) [alt: ts.sortText]\n\t\t\t\tnumberSorter     : null,       // choose overall numeric sorter function(a, b, direction, maxColumnValue)\n\n\t\t\t\t// *** widget options\n\t\t\t\twidgets: [],                   // method to add widgets, e.g. widgets: ['zebra']\n\t\t\t\twidgetOptions    : {\n\t\t\t\t\tzebra : [ 'even', 'odd' ]    // zebra widget alternating row class names\n\t\t\t\t},\n\t\t\t\tinitWidgets      : true,       // apply widgets on tablesorter initialization\n\t\t\t\twidgetClass     : 'widget-{name}', // table class name template to match to include a widget\n\n\t\t\t\t// *** callbacks\n\t\t\t\tinitialized      : null,       // function(table){},\n\n\t\t\t\t// *** extra css class names\n\t\t\t\ttableClass       : '',\n\t\t\t\tcssAsc           : '',\n\t\t\t\tcssDesc          : '',\n\t\t\t\tcssNone          : '',\n\t\t\t\tcssHeader        : '',\n\t\t\t\tcssHeaderRow     : '',\n\t\t\t\tcssProcessing    : '', // processing icon applied to header during sort/filter\n\n\t\t\t\tcssChildRow      : 'tablesorter-childRow', // class name indiciating that a row is to be attached to the its parent\n\t\t\t\tcssIcon          : 'tablesorter-icon',     //  if this class exists, a <i> will be added to the header automatically\n\t\t\t\tcssIconNone      : '', // class name added to the icon when there is no column sort\n\t\t\t\tcssIconAsc       : '', // class name added to the icon when the column has an ascending sort\n\t\t\t\tcssIconDesc      : '', // class name added to the icon when the column has a descending sort\n\t\t\t\tcssInfoBlock     : 'tablesorter-infoOnly', // don't sort tbody with this class name (only one class name allowed here!)\n\t\t\t\tcssAllowClicks   : 'tablesorter-allowClicks', // class name added to table header which allows clicks to bubble up\n\n\t\t\t\t// *** selectors\n\t\t\t\tselectorHeaders  : '> thead th, > thead td',\n\t\t\t\tselectorSort     : 'th, td',   // jQuery selector of content within selectorHeaders that is clickable to 
trigger a sort\n\t\t\t\tselectorRemove   : '.remove-me',\n\n\t\t\t\t// *** advanced\n\t\t\t\tdebug            : false,\n\n\t\t\t\t// *** Internal variables\n\t\t\t\theaderList: [],\n\t\t\t\tempties: {},\n\t\t\t\tstrings: {},\n\t\t\t\tparsers: []\n\n\t\t\t\t// deprecated; but retained for backwards compatibility\n\t\t\t\t// widgetZebra: { css: [\"even\", \"odd\"] }\n\n\t\t\t};\n\n\t\t\t// internal css classes - these will ALWAYS be added to\n\t\t\t// the table and MUST only contain one class name - fixes #381\n\t\t\tts.css = {\n\t\t\t\ttable      : 'tablesorter',\n\t\t\t\tcssHasChild: 'tablesorter-hasChildRow',\n\t\t\t\tchildRow   : 'tablesorter-childRow',\n\t\t\t\theader     : 'tablesorter-header',\n\t\t\t\theaderRow  : 'tablesorter-headerRow',\n\t\t\t\theaderIn   : 'tablesorter-header-inner',\n\t\t\t\ticon       : 'tablesorter-icon',\n\t\t\t\tinfo       : 'tablesorter-infoOnly',\n\t\t\t\tprocessing : 'tablesorter-processing',\n\t\t\t\tsortAsc    : 'tablesorter-headerAsc',\n\t\t\t\tsortDesc   : 'tablesorter-headerDesc',\n\t\t\t\tsortNone   : 'tablesorter-headerUnSorted'\n\t\t\t};\n\n\t\t\t// labels applied to sortable headers for accessibility (aria) support\n\t\t\tts.language = {\n\t\t\t\tsortAsc  : 'Ascending sort applied, ',\n\t\t\t\tsortDesc : 'Descending sort applied, ',\n\t\t\t\tsortNone : 'No sort applied, ',\n\t\t\t\tnextAsc  : 'activate to apply an ascending sort',\n\t\t\t\tnextDesc : 'activate to apply a descending sort',\n\t\t\t\tnextNone : 'activate to remove the sort'\n\t\t\t};\n\n\t\t\t/* debuging utils */\n\t\t\tfunction log() {\n\t\t\t\tvar a = arguments[0],\n\t\t\t\t\ts = arguments.length > 1 ? Array.prototype.slice.call(arguments) : a;\n\t\t\t\tif (typeof console !== \"undefined\" && typeof console.log !== \"undefined\") {\n\t\t\t\t\tconsole[ /error/i.test(a) ? 'error' : /warn/i.test(a) ? 
'warn' : 'log' ](s);\n\t\t\t\t} else {\n\t\t\t\t\talert(s);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfunction benchmark(s, d) {\n\t\t\t\tlog(s + \" (\" + (new Date().getTime() - d.getTime()) + \"ms)\");\n\t\t\t}\n\n\t\t\tts.log = log;\n\t\t\tts.benchmark = benchmark;\n\n\t\t\t// $.isEmptyObject from jQuery v1.4\n\t\t\tfunction isEmptyObject(obj) {\n\t\t\t\t/*jshint forin: false */\n\t\t\t\tfor (var name in obj) {\n\t\t\t\t\treturn false;\n\t\t\t\t}\n\t\t\t\treturn true;\n\t\t\t}\n\n\t\t\tfunction getElementText(table, node, cellIndex) {\n\t\t\t\tif (!node) { return \"\"; }\n\t\t\t\tvar te, c = table.config,\n\t\t\t\t\tt = c.textExtraction || '',\n\t\t\t\t\ttext = \"\";\n\t\t\t\tif (t === \"basic\") {\n\t\t\t\t\t// check data-attribute first\n\t\t\t\t\ttext = $(node).attr(c.textAttribute) || node.textContent || node.innerText || $(node).text() || \"\";\n\t\t\t\t} else {\n\t\t\t\t\tif (typeof(t) === \"function\") {\n\t\t\t\t\t\ttext = t(node, table, cellIndex);\n\t\t\t\t\t} else if (typeof (te = ts.getColumnData( table, t, cellIndex )) === 'function') {\n\t\t\t\t\t\ttext = te(node, table, cellIndex);\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// previous \"simple\" method\n\t\t\t\t\t\ttext = node.textContent || node.innerText || $(node).text() || \"\";\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn $.trim(text);\n\t\t\t}\n\n\t\t\tfunction detectParserForColumn(table, rows, rowIndex, cellIndex) {\n\t\t\t\tvar cur, $node,\n\t\t\t\ti = ts.parsers.length,\n\t\t\t\tnode = false,\n\t\t\t\tnodeValue = '',\n\t\t\t\tkeepLooking = true;\n\t\t\t\twhile (nodeValue === '' && keepLooking) {\n\t\t\t\t\trowIndex++;\n\t\t\t\t\tif (rows[rowIndex]) {\n\t\t\t\t\t\tnode = rows[rowIndex].cells[cellIndex];\n\t\t\t\t\t\tnodeValue = getElementText(table, node, cellIndex);\n\t\t\t\t\t\t$node = $(node);\n\t\t\t\t\t\tif (table.config.debug) {\n\t\t\t\t\t\t\tlog('Checking if value was empty on row ' + rowIndex + ', column: ' + cellIndex + ': \"' + nodeValue + '\"');\n\t\t\t\t\t\t}\n\t\t\t\t\t} else 
{\n\t\t\t\t\t\tkeepLooking = false;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\twhile (--i >= 0) {\n\t\t\t\t\tcur = ts.parsers[i];\n\t\t\t\t\t// ignore the default text parser because it will always be true\n\t\t\t\t\tif (cur && cur.id !== 'text' && cur.is && cur.is(nodeValue, table, node, $node)) {\n\t\t\t\t\t\treturn cur;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// nothing found, return the generic parser (text)\n\t\t\t\treturn ts.getParserById('text');\n\t\t\t}\n\n\t\t\tfunction buildParserCache(table) {\n\t\t\t\tvar c = table.config,\n\t\t\t\t\t// update table bodies in case we start with an empty table\n\t\t\t\t\ttb = c.$tbodies = c.$table.children('tbody:not(.' + c.cssInfoBlock + ')'),\n\t\t\t\t\trows, list, l, i, h, ch, np, p, e, time,\n\t\t\t\t\tj = 0,\n\t\t\t\t\tparsersDebug = \"\",\n\t\t\t\t\tlen = tb.length;\n\t\t\t\tif ( len === 0) {\n\t\t\t\t\treturn c.debug ? log('Warning: *Empty table!* Not building a parser cache') : '';\n\t\t\t\t} else if (c.debug) {\n\t\t\t\t\ttime = new Date();\n\t\t\t\t\tlog('Detecting parsers for each column');\n\t\t\t\t}\n\t\t\t\tlist = {\n\t\t\t\t\textractors: [],\n\t\t\t\t\tparsers: []\n\t\t\t\t};\n\t\t\t\twhile (j < len) {\n\t\t\t\t\trows = tb[j].rows;\n\t\t\t\t\tif (rows[j]) {\n\t\t\t\t\t\tl = c.columns; // rows[j].cells.length;\n\t\t\t\t\t\tfor (i = 0; i < l; i++) {\n\t\t\t\t\t\t\th = c.$headers.filter('[data-column=\"' + i + '\"]:last');\n\t\t\t\t\t\t\t// get column indexed table cell\n\t\t\t\t\t\t\tch = ts.getColumnData( table, c.headers, i );\n\t\t\t\t\t\t\t// get column parser/extractor\n\t\t\t\t\t\t\te = ts.getParserById( ts.getData(h, ch, 'extractor') );\n\t\t\t\t\t\t\tp = ts.getParserById( ts.getData(h, ch, 'sorter') );\n\t\t\t\t\t\t\tnp = ts.getData(h, ch, 'parser') === 'false';\n\t\t\t\t\t\t\t// empty cells behaviour - keeping emptyToBottom for backwards compatibility\n\t\t\t\t\t\t\tc.empties[i] = ( ts.getData(h, ch, 'empty') || c.emptyTo || (c.emptyToBottom ? 
'bottom' : 'top' ) ).toLowerCase();\n\t\t\t\t\t\t\t// text strings behaviour in numerical sorts\n\t\t\t\t\t\t\tc.strings[i] = ( ts.getData(h, ch, 'string') || c.stringTo || 'max' ).toLowerCase();\n\t\t\t\t\t\t\tif (np) {\n\t\t\t\t\t\t\t\tp = ts.getParserById('no-parser');\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tif (!e) {\n\t\t\t\t\t\t\t\t// For now, maybe detect someday\n\t\t\t\t\t\t\t\te = false;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tif (!p) {\n\t\t\t\t\t\t\t\tp = detectParserForColumn(table, rows, -1, i);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tif (c.debug) {\n\t\t\t\t\t\t\t\tparsersDebug += \"column:\" + i + \"; extractor:\" + e.id + \"; parser:\" + p.id + \"; string:\" + c.strings[i] + '; empty: ' + c.empties[i] + \"\\n\";\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tlist.parsers[i] = p;\n\t\t\t\t\t\t\tlist.extractors[i] = e;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tj += (list.parsers.length) ? len : 1;\n\t\t\t\t}\n\t\t\t\tif (c.debug) {\n\t\t\t\t\tlog(parsersDebug ? parsersDebug : \"No parsers detected\");\n\t\t\t\t\tbenchmark(\"Completed detecting parsers\", time);\n\t\t\t\t}\n\t\t\t\tc.parsers = list.parsers;\n\t\t\t\tc.extractors = list.extractors;\n\t\t\t}\n\n\t\t\t/* utils */\n\t\t\tfunction buildCache(table) {\n\t\t\t\tvar cc, t, tx, v, i, j, k, $row, rows, cols, cacheTime,\n\t\t\t\t\ttotalRows, rowData, colMax,\n\t\t\t\t\tc = table.config,\n\t\t\t\t\t$tb = c.$table.children('tbody'),\n\t\t\t\t\textractors = c.extractors,\n\t\t\t\t\tparsers = c.parsers;\n\t\t\t\tc.cache = {};\n\t\t\t\tc.totalRows = 0;\n\t\t\t\t// if no parsers found, return - it's an empty table.\n\t\t\t\tif (!parsers) {\n\t\t\t\t\treturn c.debug ? 
log('Warning: *Empty table!* Not building a cache') : '';\n\t\t\t\t}\n\t\t\t\tif (c.debug) {\n\t\t\t\t\tcacheTime = new Date();\n\t\t\t\t}\n\t\t\t\t// processing icon\n\t\t\t\tif (c.showProcessing) {\n\t\t\t\t\tts.isProcessing(table, true);\n\t\t\t\t}\n\t\t\t\tfor (k = 0; k < $tb.length; k++) {\n\t\t\t\t\tcolMax = []; // column max value per tbody\n\t\t\t\t\tcc = c.cache[k] = {\n\t\t\t\t\t\tnormalized: [] // array of normalized row data; last entry contains \"rowData\" above\n\t\t\t\t\t\t// colMax: #   // added at the end\n\t\t\t\t\t};\n\n\t\t\t\t\t// ignore tbodies with class name from c.cssInfoBlock\n\t\t\t\t\tif (!$tb.eq(k).hasClass(c.cssInfoBlock)) {\n\t\t\t\t\t\ttotalRows = ($tb[k] && $tb[k].rows.length) || 0;\n\t\t\t\t\t\tfor (i = 0; i < totalRows; ++i) {\n\t\t\t\t\t\t\trowData = {\n\t\t\t\t\t\t\t\t// order: original row order #\n\t\t\t\t\t\t\t\t// $row : jQuery Object[]\n\t\t\t\t\t\t\t\tchild: [] // child row text (filter widget)\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\t/** Add the table data to main data array */\n\t\t\t\t\t\t\t$row = $($tb[k].rows[i]);\n\t\t\t\t\t\t\trows = [ new Array(c.columns) ];\n\t\t\t\t\t\t\tcols = [];\n\t\t\t\t\t\t\t// if this is a child row, add it to the last row's children and continue to the next row\n\t\t\t\t\t\t\t// ignore child row class, if it is the first row\n\t\t\t\t\t\t\tif ($row.hasClass(c.cssChildRow) && i !== 0) {\n\t\t\t\t\t\t\t\tt = cc.normalized.length - 1;\n\t\t\t\t\t\t\t\tcc.normalized[t][c.columns].$row = cc.normalized[t][c.columns].$row.add($row);\n\t\t\t\t\t\t\t\t// add \"hasChild\" class name to parent row\n\t\t\t\t\t\t\t\tif (!$row.prev().hasClass(c.cssChildRow)) {\n\t\t\t\t\t\t\t\t\t$row.prev().addClass(ts.css.cssHasChild);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t// save child row content (un-parsed!)\n\t\t\t\t\t\t\t\trowData.child[t] = $.trim( $row[0].textContent || $row[0].innerText || $row.text() || \"\" );\n\t\t\t\t\t\t\t\t// go to the next for 
loop\n\t\t\t\t\t\t\t\tcontinue;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\trowData.$row = $row;\n\t\t\t\t\t\t\trowData.order = i; // add original row position to rowCache\n\t\t\t\t\t\t\tfor (j = 0; j < c.columns; ++j) {\n\t\t\t\t\t\t\t\tif (typeof parsers[j] === 'undefined') {\n\t\t\t\t\t\t\t\t\tif (c.debug) {\n\t\t\t\t\t\t\t\t\t\tlog('No parser found for cell:', $row[0].cells[j], 'does it have a header?');\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t\tcontinue;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tt = getElementText(table, $row[0].cells[j], j);\n\t\t\t\t\t\t\t\t// do extract before parsing if there is one\n\t\t\t\t\t\t\t\tif (typeof extractors[j].id === 'undefined') {\n\t\t\t\t\t\t\t\t\ttx = t;\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\ttx = extractors[j].format(t, table, $row[0].cells[j], j);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t// allow parsing if the string is empty, previously parsing would change it to zero,\n\t\t\t\t\t\t\t\t// in case the parser needs to extract data from the table cell attributes\n\t\t\t\t\t\t\t\tv = parsers[j].id === 'no-parser' ? '' : parsers[j].format(tx, table, $row[0].cells[j], j);\n\t\t\t\t\t\t\t\tcols.push( c.ignoreCase && typeof v === 'string' ? 
v.toLowerCase() : v );\n\t\t\t\t\t\t\t\tif ((parsers[j].type || '').toLowerCase() === \"numeric\") {\n\t\t\t\t\t\t\t\t\t// determine column max value (ignore sign)\n\t\t\t\t\t\t\t\t\tcolMax[j] = Math.max(Math.abs(v) || 0, colMax[j] || 0);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t// ensure rowData is always in the same location (after the last column)\n\t\t\t\t\t\t\tcols[c.columns] = rowData;\n\t\t\t\t\t\t\tcc.normalized.push(cols);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcc.colMax = colMax;\n\t\t\t\t\t\t// total up rows, not including child rows\n\t\t\t\t\t\tc.totalRows += cc.normalized.length;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (c.showProcessing) {\n\t\t\t\t\tts.isProcessing(table); // remove processing icon\n\t\t\t\t}\n\t\t\t\tif (c.debug) {\n\t\t\t\t\tbenchmark(\"Building cache for \" + totalRows + \" rows\", cacheTime);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// init flag (true) used by pager plugin to prevent widget application\n\t\t\tfunction appendToTable(table, init) {\n\t\t\t\tvar c = table.config,\n\t\t\t\t\two = c.widgetOptions,\n\t\t\t\t\tb = table.tBodies,\n\t\t\t\t\trows = [],\n\t\t\t\t\tcc = c.cache,\n\t\t\t\t\tn, totalRows, $bk, $tb,\n\t\t\t\t\ti, k, appendTime;\n\t\t\t\t// empty table - fixes #206/#346\n\t\t\t\tif (isEmptyObject(cc)) {\n\t\t\t\t\t// run pager appender in case the table was just emptied\n\t\t\t\t\treturn c.appender ? c.appender(table, rows) :\n\t\t\t\t\t\ttable.isUpdating ? 
c.$table.trigger(\"updateComplete\", table) : ''; // Fixes #532\n\t\t\t\t}\n\t\t\t\tif (c.debug) {\n\t\t\t\t\tappendTime = new Date();\n\t\t\t\t}\n\t\t\t\tfor (k = 0; k < b.length; k++) {\n\t\t\t\t\t$bk = $(b[k]);\n\t\t\t\t\tif ($bk.length && !$bk.hasClass(c.cssInfoBlock)) {\n\t\t\t\t\t\t// get tbody\n\t\t\t\t\t\t$tb = ts.processTbody(table, $bk, true);\n\t\t\t\t\t\tn = cc[k].normalized;\n\t\t\t\t\t\ttotalRows = n.length;\n\t\t\t\t\t\tfor (i = 0; i < totalRows; i++) {\n\t\t\t\t\t\t\trows.push(n[i][c.columns].$row);\n\t\t\t\t\t\t\t// removeRows used by the pager plugin; don't render if using ajax - fixes #411\n\t\t\t\t\t\t\tif (!c.appender || (c.pager && (!c.pager.removeRows || !wo.pager_removeRows) && !c.pager.ajax)) {\n\t\t\t\t\t\t\t\t$tb.append(n[i][c.columns].$row);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t// restore tbody\n\t\t\t\t\t\tts.processTbody(table, $tb, false);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (c.appender) {\n\t\t\t\t\tc.appender(table, rows);\n\t\t\t\t}\n\t\t\t\tif (c.debug) {\n\t\t\t\t\tbenchmark(\"Rebuilt table\", appendTime);\n\t\t\t\t}\n\t\t\t\t// apply table widgets; but not before ajax completes\n\t\t\t\tif (!init && !c.appender) { ts.applyWidget(table); }\n\t\t\t\tif (table.isUpdating) {\n\t\t\t\t\tc.$table.trigger(\"updateComplete\", table);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfunction formatSortingOrder(v) {\n\t\t\t\t// look for \"d\" in \"desc\" order; return true\n\t\t\t\treturn (/^d/i.test(v) || v === 1);\n\t\t\t}\n\n\t\t\tfunction buildHeaders(table) {\n\t\t\t\tvar ch, $t,\n\t\t\t\t\th, i, t, lock, time,\n\t\t\t\t\tc = table.config;\n\t\t\t\tc.headerList = [];\n\t\t\t\tc.headerContent = [];\n\t\t\t\tif (c.debug) {\n\t\t\t\t\ttime = new Date();\n\t\t\t\t}\n\t\t\t\t// children tr in tfoot - see issue #196 & #547\n\t\t\t\tc.columns = ts.computeColumnIndex( c.$table.children('thead, tfoot').children('tr') );\n\t\t\t\t// add icon if cssIcon option exists\n\t\t\t\ti = c.cssIcon ? '<i class=\"' + ( c.cssIcon === ts.css.icon ? 
ts.css.icon : c.cssIcon + ' ' + ts.css.icon ) + '\"></i>' : '';\n\t\t\t\t// redefine c.$headers here in case of an updateAll that replaces or adds an entire header cell - see #683\n\t\t\t\tc.$headers = $(table).find(c.selectorHeaders).each(function(index) {\n\t\t\t\t\t$t = $(this);\n\t\t\t\t\t// make sure to get header cell & not column indexed cell\n\t\t\t\t\tch = ts.getColumnData( table, c.headers, index, true );\n\t\t\t\t\t// save original header content\n\t\t\t\t\tc.headerContent[index] = $(this).html();\n\t\t\t\t\t// if headerTemplate is empty, don't reformat the header cell\n\t\t\t\t\tif ( c.headerTemplate !== '' ) {\n\t\t\t\t\t\t// set up header template\n\t\t\t\t\t\tt = c.headerTemplate.replace(/\\{content\\}/g, $(this).html()).replace(/\\{icon\\}/g, i);\n\t\t\t\t\t\tif (c.onRenderTemplate) {\n\t\t\t\t\t\t\th = c.onRenderTemplate.apply($t, [index, t]);\n\t\t\t\t\t\t\tif (h && typeof h === 'string') { t = h; } // only change t if something is returned\n\t\t\t\t\t\t}\n\t\t\t\t\t\t$(this).html('<div class=\"' + ts.css.headerIn + '\">' + t + '</div>'); // faster than wrapInner\n\t\t\t\t\t}\n\t\t\t\t\tif (c.onRenderHeader) { c.onRenderHeader.apply($t, [index, c, c.$table]); }\n\t\t\t\t\t// *** remove this.column value if no conflicts found\n\t\t\t\t\tthis.column = parseInt( $(this).attr('data-column'), 10);\n\t\t\t\t\tthis.order = formatSortingOrder( ts.getData($t, ch, 'sortInitialOrder') || c.sortInitialOrder ) ? [1,0,2] : [0,1,2];\n\t\t\t\t\tthis.count = -1; // set to -1 because clicking on the header automatically adds one\n\t\t\t\t\tthis.lockedOrder = false;\n\t\t\t\t\tlock = ts.getData($t, ch, 'lockedOrder') || false;\n\t\t\t\t\tif (typeof lock !== 'undefined' && lock !== false) {\n\t\t\t\t\t\tthis.order = this.lockedOrder = formatSortingOrder(lock) ? 
[1,1,1] : [0,0,0];\n\t\t\t\t\t}\n\t\t\t\t\t$t.addClass(ts.css.header + ' ' + c.cssHeader);\n\t\t\t\t\t// add cell to headerList\n\t\t\t\t\tc.headerList[index] = this;\n\t\t\t\t\t// add to parent in case there are multiple rows\n\t\t\t\t\t$t.parent().addClass(ts.css.headerRow + ' ' + c.cssHeaderRow).attr('role', 'row');\n\t\t\t\t\t// allow keyboard cursor to focus on element\n\t\t\t\t\tif (c.tabIndex) { $t.attr(\"tabindex\", 0); }\n\t\t\t\t}).attr({\n\t\t\t\t\tscope: 'col',\n\t\t\t\t\trole : 'columnheader'\n\t\t\t\t});\n\t\t\t\t// enable/disable sorting\n\t\t\t\tupdateHeader(table);\n\t\t\t\tif (c.debug) {\n\t\t\t\t\tbenchmark(\"Built headers:\", time);\n\t\t\t\t\tlog(c.$headers);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfunction commonUpdate(table, resort, callback) {\n\t\t\t\tvar c = table.config;\n\t\t\t\t// remove rows/elements before update\n\t\t\t\tc.$table.find(c.selectorRemove).remove();\n\t\t\t\t// rebuild parsers\n\t\t\t\tbuildParserCache(table);\n\t\t\t\t// rebuild the cache map\n\t\t\t\tbuildCache(table);\n\t\t\t\tcheckResort(c.$table, resort, callback);\n\t\t\t}\n\n\t\t\tfunction updateHeader(table) {\n\t\t\t\tvar s, $th, col,\n\t\t\t\t\tc = table.config;\n\t\t\t\tc.$headers.each(function(index, th){\n\t\t\t\t\t$th = $(th);\n\t\t\t\t\tcol = ts.getColumnData( table, c.headers, index, true );\n\t\t\t\t\t// add \"sorter-false\" class if \"parser-false\" is set\n\t\t\t\t\ts = ts.getData( th, col, 'sorter' ) === 'false' || ts.getData( th, col, 'parser' ) === 'false';\n\t\t\t\t\tth.sortDisabled = s;\n\t\t\t\t\t$th[ s ? 
'addClass' : 'removeClass' ]('sorter-false').attr('aria-disabled', '' + s);\n\t\t\t\t\t// aria-controls - requires table ID\n\t\t\t\t\tif (table.id) {\n\t\t\t\t\t\tif (s) {\n\t\t\t\t\t\t\t$th.removeAttr('aria-controls');\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t$th.attr('aria-controls', table.id);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tfunction setHeadersCss(table) {\n\t\t\t\tvar f, i, j,\n\t\t\t\t\tc = table.config,\n\t\t\t\t\tlist = c.sortList,\n\t\t\t\t\tlen = list.length,\n\t\t\t\t\tnone = ts.css.sortNone + ' ' + c.cssNone,\n\t\t\t\t\tcss = [ts.css.sortAsc + ' ' + c.cssAsc, ts.css.sortDesc + ' ' + c.cssDesc],\n\t\t\t\t\tcssIcon = [ c.cssIconAsc, c.cssIconDesc, c.cssIconNone ],\n\t\t\t\t\taria = ['ascending', 'descending'],\n\t\t\t\t\t// find the footer\n\t\t\t\t\t$t = $(table).find('tfoot tr').children().add(c.$extraHeaders).removeClass(css.join(' '));\n\t\t\t\t// remove all header information\n\t\t\t\tc.$headers\n\t\t\t\t\t.removeClass(css.join(' '))\n\t\t\t\t\t.addClass(none).attr('aria-sort', 'none')\n\t\t\t\t\t.find('.' + c.cssIcon)\n\t\t\t\t\t.removeClass(cssIcon.join(' '))\n\t\t\t\t\t.addClass(cssIcon[2]);\n\t\t\t\tfor (i = 0; i < len; i++) {\n\t\t\t\t\t// direction = 2 means reset!\n\t\t\t\t\tif (list[i][1] !== 2) {\n\t\t\t\t\t\t// multicolumn sorting updating - choose the :last in case there are nested columns\n\t\t\t\t\t\tf = c.$headers.not('.sorter-false').filter('[data-column=\"' + list[i][0] + '\"]' + (len === 1 ? ':last' : '') );\n\t\t\t\t\t\tif (f.length) {\n\t\t\t\t\t\t\tfor (j = 0; j < f.length; j++) {\n\t\t\t\t\t\t\t\tif (!f[j].sortDisabled) {\n\t\t\t\t\t\t\t\t\tf.eq(j)\n\t\t\t\t\t\t\t\t\t\t.removeClass(none)\n\t\t\t\t\t\t\t\t\t\t.addClass(css[list[i][1]])\n\t\t\t\t\t\t\t\t\t\t.attr('aria-sort', aria[list[i][1]])\n\t\t\t\t\t\t\t\t\t\t.find('.' 
+ c.cssIcon)\n\t\t\t\t\t\t\t\t\t\t.removeClass(cssIcon[2])\n\t\t\t\t\t\t\t\t\t\t.addClass(cssIcon[list[i][1]]);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t// add sorted class to footer & extra headers, if they exist\n\t\t\t\t\t\t\tif ($t.length) {\n\t\t\t\t\t\t\t\t$t.filter('[data-column=\"' + list[i][0] + '\"]').removeClass(none).addClass(css[list[i][1]]);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// add verbose aria labels\n\t\t\t\tc.$headers.not('.sorter-false').each(function(){\n\t\t\t\t\tvar $this = $(this),\n\t\t\t\t\t\tnextSort = this.order[(this.count + 1) % (c.sortReset ? 3 : 2)],\n\t\t\t\t\t\ttxt = $this.text() + ': ' +\n\t\t\t\t\t\t\tts.language[ $this.hasClass(ts.css.sortAsc) ? 'sortAsc' : $this.hasClass(ts.css.sortDesc) ? 'sortDesc' : 'sortNone' ] +\n\t\t\t\t\t\t\tts.language[ nextSort === 0 ? 'nextAsc' : nextSort === 1 ? 'nextDesc' : 'nextNone' ];\n\t\t\t\t\t$this.attr('aria-label', txt );\n\t\t\t\t});\n\t\t\t}\n\n\t\t\t// automatically add col group, and column sizes if set\n\t\t\tfunction fixColumnWidth(table) {\n\t\t\t\tvar colgroup, overallWidth,\n\t\t\t\t\tc = table.config;\n\t\t\t\tif (c.widthFixed && c.$table.children('colgroup').length === 0) {\n\t\t\t\t\tcolgroup = $('<colgroup>');\n\t\t\t\t\toverallWidth = $(table).width();\n\t\t\t\t\t// only add col for visible columns - fixes #371\n\t\t\t\t\t$(table.tBodies).not('.' 
+ c.cssInfoBlock).find(\"tr:first\").children(\":visible\").each(function() {\n\t\t\t\t\t\tcolgroup.append($('<col>').css('width', parseInt(($(this).width()/overallWidth)*1000, 10)/10 + '%'));\n\t\t\t\t\t});\n\t\t\t\t\tc.$table.prepend(colgroup);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfunction updateHeaderSortCount(table, list) {\n\t\t\t\tvar s, t, o, col, primary,\n\t\t\t\t\tc = table.config,\n\t\t\t\t\tsl = list || c.sortList;\n\t\t\t\tc.sortList = [];\n\t\t\t\t$.each(sl, function(i,v){\n\t\t\t\t\t// ensure all sortList values are numeric - fixes #127\n\t\t\t\t\tcol = parseInt(v[0], 10);\n\t\t\t\t\t// make sure header exists\n\t\t\t\t\to = c.$headers.filter('[data-column=\"' + col + '\"]:last')[0];\n\t\t\t\t\tif (o) { // prevents error if sorton array is wrong\n\t\t\t\t\t\t// o.count = o.count + 1;\n\t\t\t\t\t\tt = ('' + v[1]).match(/^(1|d|s|o|n)/);\n\t\t\t\t\t\tt = t ? t[0] : '';\n\t\t\t\t\t\t// 0/(a)sc (default), 1/(d)esc, (s)ame, (o)pposite, (n)ext\n\t\t\t\t\t\tswitch(t) {\n\t\t\t\t\t\t\tcase '1': case 'd': // descending\n\t\t\t\t\t\t\t\tt = 1;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\tcase 's': // same direction (as primary column)\n\t\t\t\t\t\t\t\t// if primary sort is set to \"s\", make it ascending\n\t\t\t\t\t\t\t\tt = primary || 0;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\tcase 'o':\n\t\t\t\t\t\t\t\ts = o.order[(primary || 0) % (c.sortReset ? 3 : 2)];\n\t\t\t\t\t\t\t\t// opposite of primary column; but resets if primary resets\n\t\t\t\t\t\t\t\tt = s === 0 ? 1 : s === 1 ? 0 : 2;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\tcase 'n':\n\t\t\t\t\t\t\t\to.count = o.count + 1;\n\t\t\t\t\t\t\t\tt = o.order[(o.count) % (c.sortReset ? 3 : 2)];\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\tdefault: // ascending\n\t\t\t\t\t\t\t\tt = 0;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tprimary = i === 0 ? t : primary;\n\t\t\t\t\t\ts = [ col, parseInt(t, 10) || 0 ];\n\t\t\t\t\t\tc.sortList.push(s);\n\t\t\t\t\t\tt = $.inArray(s[1], o.order); // fixes issue #167\n\t\t\t\t\t\to.count = t >= 0 ? 
t : s[1] % (c.sortReset ? 3 : 2);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tfunction getCachedSortType(parsers, i) {\n\t\t\t\treturn (parsers && parsers[i]) ? parsers[i].type || '' : '';\n\t\t\t}\n\n\t\t\tfunction initSort(table, cell, event){\n\t\t\t\tif (table.isUpdating) {\n\t\t\t\t\t// let any updates complete before initializing a sort\n\t\t\t\t\treturn setTimeout(function(){ initSort(table, cell, event); }, 50);\n\t\t\t\t}\n\t\t\t\tvar arry, indx, col, order, s,\n\t\t\t\t\tc = table.config,\n\t\t\t\t\tkey = !event[c.sortMultiSortKey],\n\t\t\t\t\t$table = c.$table;\n\t\t\t\t// Only call sortStart if sorting is enabled\n\t\t\t\t$table.trigger(\"sortStart\", table);\n\t\t\t\t// get current column sort order\n\t\t\t\tcell.count = event[c.sortResetKey] ? 2 : (cell.count + 1) % (c.sortReset ? 3 : 2);\n\t\t\t\t// reset all sorts on non-current column - issue #30\n\t\t\t\tif (c.sortRestart) {\n\t\t\t\t\tindx = cell;\n\t\t\t\t\tc.$headers.each(function() {\n\t\t\t\t\t\t// only reset counts on columns that weren't just clicked on and if not included in a multisort\n\t\t\t\t\t\tif (this !== indx && (key || !$(this).is('.' + ts.css.sortDesc + ',.' 
+ ts.css.sortAsc))) {\n\t\t\t\t\t\t\tthis.count = -1;\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\t// get current column index\n\t\t\t\tindx = parseInt( $(cell).attr('data-column'), 10 );\n\t\t\t\t// user only wants to sort on one column\n\t\t\t\tif (key) {\n\t\t\t\t\t// flush the sort list\n\t\t\t\t\tc.sortList = [];\n\t\t\t\t\tif (c.sortForce !== null) {\n\t\t\t\t\t\tarry = c.sortForce;\n\t\t\t\t\t\tfor (col = 0; col < arry.length; col++) {\n\t\t\t\t\t\t\tif (arry[col][0] !== indx) {\n\t\t\t\t\t\t\t\tc.sortList.push(arry[col]);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\t// add column to sort list\n\t\t\t\t\torder = cell.order[cell.count];\n\t\t\t\t\tif (order < 2) {\n\t\t\t\t\t\tc.sortList.push([indx, order]);\n\t\t\t\t\t\t// add other columns if header spans across multiple\n\t\t\t\t\t\tif (cell.colSpan > 1) {\n\t\t\t\t\t\t\tfor (col = 1; col < cell.colSpan; col++) {\n\t\t\t\t\t\t\t\tc.sortList.push([indx + col, order]);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\t// multi column sorting\n\t\t\t\t} else {\n\t\t\t\t\t// get rid of the sortAppend before adding more - fixes issue #115 & #523\n\t\t\t\t\tif (c.sortAppend && c.sortList.length > 1) {\n\t\t\t\t\t\tfor (col = 0; col < c.sortAppend.length; col++) {\n\t\t\t\t\t\t\ts = ts.isValueInArray(c.sortAppend[col][0], c.sortList);\n\t\t\t\t\t\t\tif (s >= 0) {\n\t\t\t\t\t\t\t\tc.sortList.splice(s,1);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\t// the user has clicked on an already sorted column\n\t\t\t\t\tif (ts.isValueInArray(indx, c.sortList) >= 0) {\n\t\t\t\t\t\t// reverse the sorting direction\n\t\t\t\t\t\tfor (col = 0; col < c.sortList.length; col++) {\n\t\t\t\t\t\t\ts = c.sortList[col];\n\t\t\t\t\t\t\torder = c.$headers.filter('[data-column=\"' + s[0] + '\"]:last')[0];\n\t\t\t\t\t\t\tif (s[0] === indx) {\n\t\t\t\t\t\t\t\t// order.count seems to be incorrect when compared to cell.count\n\t\t\t\t\t\t\t\ts[1] = order.order[cell.count];\n\t\t\t\t\t\t\t\tif (s[1] === 2) 
{\n\t\t\t\t\t\t\t\t\tc.sortList.splice(col,1);\n\t\t\t\t\t\t\t\t\torder.count = -1;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// add column to sort list array\n\t\t\t\t\t\torder = cell.order[cell.count];\n\t\t\t\t\t\tif (order < 2) {\n\t\t\t\t\t\t\tc.sortList.push([indx, order]);\n\t\t\t\t\t\t\t// add other columns if header spans across multiple\n\t\t\t\t\t\t\tif (cell.colSpan > 1) {\n\t\t\t\t\t\t\t\tfor (col = 1; col < cell.colSpan; col++) {\n\t\t\t\t\t\t\t\t\tc.sortList.push([indx + col, order]);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (c.sortAppend !== null) {\n\t\t\t\t\tarry = c.sortAppend;\n\t\t\t\t\tfor (col = 0; col < arry.length; col++) {\n\t\t\t\t\t\tif (arry[col][0] !== indx) {\n\t\t\t\t\t\t\tc.sortList.push(arry[col]);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// sortBegin event triggered immediately before the sort\n\t\t\t\t$table.trigger(\"sortBegin\", table);\n\t\t\t\t// setTimeout needed so the processing icon shows up\n\t\t\t\tsetTimeout(function(){\n\t\t\t\t\t// set css for headers\n\t\t\t\t\tsetHeadersCss(table);\n\t\t\t\t\tmultisort(table);\n\t\t\t\t\tappendToTable(table);\n\t\t\t\t\t$table.trigger(\"sortEnd\", table);\n\t\t\t\t}, 1);\n\t\t\t}\n\n\t\t\t// sort multiple columns\n\t\t\tfunction multisort(table) { /*jshint loopfunc:true */\n\t\t\t\tvar i, k, num, col, sortTime, colMax,\n\t\t\t\t\tcache, order, sort, x, y,\n\t\t\t\t\tdir = 0,\n\t\t\t\t\tc = table.config,\n\t\t\t\t\tcts = c.textSorter || '',\n\t\t\t\t\tsortList = c.sortList,\n\t\t\t\t\tl = sortList.length,\n\t\t\t\t\tbl = table.tBodies.length;\n\t\t\t\tif (c.serverSideSorting || isEmptyObject(c.cache)) { // empty table - fixes #206/#346\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\tif (c.debug) { sortTime = new Date(); }\n\t\t\t\tfor (k = 0; k < bl; k++) {\n\t\t\t\t\tcolMax = c.cache[k].colMax;\n\t\t\t\t\tcache = c.cache[k].normalized;\n\n\t\t\t\t\tcache.sort(function(a, b) {\n\t\t\t\t\t\t// cache 
is undefined here in IE, so don't use it!\n\t\t\t\t\t\tfor (i = 0; i < l; i++) {\n\t\t\t\t\t\t\tcol = sortList[i][0];\n\t\t\t\t\t\t\torder = sortList[i][1];\n\t\t\t\t\t\t\t// sort direction, true = asc, false = desc\n\t\t\t\t\t\t\tdir = order === 0;\n\n\t\t\t\t\t\t\tif (c.sortStable && a[col] === b[col] && l === 1) {\n\t\t\t\t\t\t\t\treturn a[c.columns].order - b[c.columns].order;\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t// fallback to natural sort since it is more robust\n\t\t\t\t\t\t\tnum = /n/i.test(getCachedSortType(c.parsers, col));\n\t\t\t\t\t\t\tif (num && c.strings[col]) {\n\t\t\t\t\t\t\t\t// sort strings in numerical columns\n\t\t\t\t\t\t\t\tif (typeof (c.string[c.strings[col]]) === 'boolean') {\n\t\t\t\t\t\t\t\t\tnum = (dir ? 1 : -1) * (c.string[c.strings[col]] ? -1 : 1);\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tnum = (c.strings[col]) ? c.string[c.strings[col]] || 0 : 0;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t// fall back to built-in numeric sort\n\t\t\t\t\t\t\t\t// var sort = $.tablesorter[\"sort\" + s](table, a[c], b[c], c, colMax[c], dir);\n\t\t\t\t\t\t\t\tsort = c.numberSorter ? c.numberSorter(a[col], b[col], dir, colMax[col], table) :\n\t\t\t\t\t\t\t\t\tts[ 'sortNumeric' + (dir ? 'Asc' : 'Desc') ](a[col], b[col], num, colMax[col], col, table);\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t// set a & b depending on sort direction\n\t\t\t\t\t\t\t\tx = dir ? a : b;\n\t\t\t\t\t\t\t\ty = dir ? b : a;\n\t\t\t\t\t\t\t\t// text sort function\n\t\t\t\t\t\t\t\tif (typeof(cts) === 'function') {\n\t\t\t\t\t\t\t\t\t// custom OVERALL text sorter\n\t\t\t\t\t\t\t\t\tsort = cts(x[col], y[col], dir, col, table);\n\t\t\t\t\t\t\t\t} else if (typeof(cts) === 'object' && cts.hasOwnProperty(col)) {\n\t\t\t\t\t\t\t\t\t// custom text sorter for a SPECIFIC COLUMN\n\t\t\t\t\t\t\t\t\tsort = cts[col](x[col], y[col], dir, col, table);\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\t// fall back to natural sort\n\t\t\t\t\t\t\t\t\tsort = ts[ 'sortNatural' + (dir ? 
'Asc' : 'Desc') ](a[col], b[col], col, table, c);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tif (sort) { return sort; }\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn a[c.columns].order - b[c.columns].order;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tif (c.debug) { benchmark(\"Sorting on \" + sortList.toString() + \" and dir \" + order + \" time\", sortTime); }\n\t\t\t}\n\n\t\t\tfunction resortComplete($table, callback){\n\t\t\t\tvar table = $table[0];\n\t\t\t\tif (table.isUpdating) {\n\t\t\t\t\t$table.trigger('updateComplete', table);\n\t\t\t\t}\n\t\t\t\tif ($.isFunction(callback)) {\n\t\t\t\t\tcallback($table[0]);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfunction checkResort($table, flag, callback) {\n\t\t\t\tvar sl = $table[0].config.sortList;\n\t\t\t\t// don't try to resort if the table is still processing\n\t\t\t\t// this will catch spamming of the updateCell method\n\t\t\t\tif (flag !== false && !$table[0].isProcessing && sl.length) {\n\t\t\t\t\t$table.trigger(\"sorton\", [sl, function(){\n\t\t\t\t\t\tresortComplete($table, callback);\n\t\t\t\t\t}, true]);\n\t\t\t\t} else {\n\t\t\t\t\tresortComplete($table, callback);\n\t\t\t\t\tts.applyWidget($table[0], false);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfunction bindMethods(table){\n\t\t\t\tvar c = table.config,\n\t\t\t\t\t$table = c.$table;\n\t\t\t\t// apply easy methods that trigger bound events\n\t\t\t\t$table\n\t\t\t\t.unbind('sortReset update updateRows updateCell updateAll addRows updateComplete sorton appendCache updateCache applyWidgetId applyWidgets refreshWidgets destroy mouseup mouseleave '.split(' ').join(c.namespace + ' '))\n\t\t\t\t.bind(\"sortReset\" + c.namespace, function(e, callback){\n\t\t\t\t\te.stopPropagation();\n\t\t\t\t\tc.sortList = [];\n\t\t\t\t\tsetHeadersCss(table);\n\t\t\t\t\tmultisort(table);\n\t\t\t\t\tappendToTable(table);\n\t\t\t\t\tif ($.isFunction(callback)) {\n\t\t\t\t\t\tcallback(table);\n\t\t\t\t\t}\n\t\t\t\t})\n\t\t\t\t.bind(\"updateAll\" + c.namespace, function(e, resort, 
callback){\n\t\t\t\t\te.stopPropagation();\n\t\t\t\t\ttable.isUpdating = true;\n\t\t\t\t\tts.refreshWidgets(table, true, true);\n\t\t\t\t\tts.restoreHeaders(table);\n\t\t\t\t\tbuildHeaders(table);\n\t\t\t\t\tts.bindEvents(table, c.$headers, true);\n\t\t\t\t\tbindMethods(table);\n\t\t\t\t\tcommonUpdate(table, resort, callback);\n\t\t\t\t})\n\t\t\t\t.bind(\"update\" + c.namespace + \" updateRows\" + c.namespace, function(e, resort, callback) {\n\t\t\t\t\te.stopPropagation();\n\t\t\t\t\ttable.isUpdating = true;\n\t\t\t\t\t// update sorting (if enabled/disabled)\n\t\t\t\t\tupdateHeader(table);\n\t\t\t\t\tcommonUpdate(table, resort, callback);\n\t\t\t\t})\n\t\t\t\t.bind(\"updateCell\" + c.namespace, function(e, cell, resort, callback) {\n\t\t\t\t\te.stopPropagation();\n\t\t\t\t\ttable.isUpdating = true;\n\t\t\t\t\t$table.find(c.selectorRemove).remove();\n\t\t\t\t\t// get position from the dom\n\t\t\t\t\tvar v, t, row, icell,\n\t\t\t\t\t$tb = $table.find('tbody'),\n\t\t\t\t\t$cell = $(cell),\n\t\t\t\t\t// update cache - format: function(s, table, cell, cellIndex)\n\t\t\t\t\t// no closest in jQuery v1.2.6 - tbdy = $tb.index( $(cell).closest('tbody') ),$row = $(cell).closest('tr');\n\t\t\t\t\ttbdy = $tb.index( $.fn.closest ? $cell.closest('tbody') : $cell.parents('tbody').filter(':first') ),\n\t\t\t\t\t$row = $.fn.closest ? 
$cell.closest('tr') : $cell.parents('tr').filter(':first');\n\t\t\t\t\tcell = $cell[0]; // in case cell is a jQuery object\n\t\t\t\t\t// tbody may not exist if update is initialized while tbody is removed for processing\n\t\t\t\t\tif ($tb.length && tbdy >= 0) {\n\t\t\t\t\t\trow = $tb.eq(tbdy).find('tr').index( $row );\n\t\t\t\t\t\ticell = $cell.index();\n\t\t\t\t\t\tc.cache[tbdy].normalized[row][c.columns].$row = $row;\n\t\t\t\t\t\tif (typeof c.extractors[icell].id === 'undefined') {\n\t\t\t\t\t\t\tt = getElementText(table, cell, icell);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tt = c.extractors[icell].format( getElementText(table, cell, icell), table, cell, icell );\n\t\t\t\t\t\t}\n\t\t\t\t\t\tv = c.parsers[icell].id === 'no-parser' ? '' :\n\t\t\t\t\t\t\tc.parsers[icell].format( t, table, cell, icell );\n\t\t\t\t\t\tc.cache[tbdy].normalized[row][icell] = c.ignoreCase && typeof v === 'string' ? v.toLowerCase() : v;\n\t\t\t\t\t\tif ((c.parsers[icell].type || '').toLowerCase() === \"numeric\") {\n\t\t\t\t\t\t\t// update column max value (ignore sign)\n\t\t\t\t\t\t\tc.cache[tbdy].colMax[icell] = Math.max(Math.abs(v) || 0, c.cache[tbdy].colMax[icell] || 0);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcheckResort($table, resort, callback);\n\t\t\t\t\t}\n\t\t\t\t})\n\t\t\t\t.bind(\"addRows\" + c.namespace, function(e, $row, resort, callback) {\n\t\t\t\t\te.stopPropagation();\n\t\t\t\t\ttable.isUpdating = true;\n\t\t\t\t\tif (isEmptyObject(c.cache)) {\n\t\t\t\t\t\t// empty table, do an update instead - fixes #450\n\t\t\t\t\t\tupdateHeader(table);\n\t\t\t\t\t\tcommonUpdate(table, resort, callback);\n\t\t\t\t\t} else {\n\t\t\t\t\t\t$row = $($row).attr('role', 'row'); // make sure we're using a jQuery object\n\t\t\t\t\t\tvar i, j, l, t, v, rowData, cells,\n\t\t\t\t\t\trows = $row.filter('tr').length,\n\t\t\t\t\t\ttbdy = $table.find('tbody').index( $row.parents('tbody').filter(':first') );\n\t\t\t\t\t\t// fixes adding rows to an empty table - see issue #179\n\t\t\t\t\t\tif (!(c.parsers && 
c.parsers.length)) {\n\t\t\t\t\t\t\tbuildParserCache(table);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t// add each row\n\t\t\t\t\t\tfor (i = 0; i < rows; i++) {\n\t\t\t\t\t\t\tl = $row[i].cells.length;\n\t\t\t\t\t\t\tcells = [];\n\t\t\t\t\t\t\trowData = {\n\t\t\t\t\t\t\t\tchild: [],\n\t\t\t\t\t\t\t\t$row : $row.eq(i),\n\t\t\t\t\t\t\t\torder: c.cache[tbdy].normalized.length\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\t// add each cell\n\t\t\t\t\t\t\tfor (j = 0; j < l; j++) {\n\t\t\t\t\t\t\t\tif (typeof c.extractors[j].id === 'undefined') {\n\t\t\t\t\t\t\t\t\tt = getElementText(table, $row[i].cells[j], j);\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tt = c.extractors[j].format( getElementText(table, $row[i].cells[j], j), table, $row[i].cells[j], j );\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tv = c.parsers[j].id === 'no-parser' ? '' :\n\t\t\t\t\t\t\t\t\tc.parsers[j].format( t, table, $row[i].cells[j], j );\n\t\t\t\t\t\t\t\tcells[j] = c.ignoreCase && typeof v === 'string' ? v.toLowerCase() : v;\n\t\t\t\t\t\t\t\tif ((c.parsers[j].type || '').toLowerCase() === \"numeric\") {\n\t\t\t\t\t\t\t\t\t// update column max value (ignore sign)\n\t\t\t\t\t\t\t\t\tc.cache[tbdy].colMax[j] = Math.max(Math.abs(cells[j]) || 0, c.cache[tbdy].colMax[j] || 0);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t// add the row data to the end\n\t\t\t\t\t\t\tcells.push(rowData);\n\t\t\t\t\t\t\t// update cache\n\t\t\t\t\t\t\tc.cache[tbdy].normalized.push(cells);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t// resort using current settings\n\t\t\t\t\t\tcheckResort($table, resort, callback);\n\t\t\t\t\t}\n\t\t\t\t})\n\t\t\t\t.bind(\"updateComplete\" + c.namespace, function(){\n\t\t\t\t\ttable.isUpdating = false;\n\t\t\t\t})\n\t\t\t\t.bind(\"sorton\" + c.namespace, function(e, list, callback, init) {\n\t\t\t\t\tvar c = table.config;\n\t\t\t\t\te.stopPropagation();\n\t\t\t\t\t$table.trigger(\"sortStart\", this);\n\t\t\t\t\t// update header count index\n\t\t\t\t\tupdateHeaderSortCount(table, list);\n\t\t\t\t\t// set css for 
headers\n\t\t\t\t\tsetHeadersCss(table);\n\t\t\t\t\t// fixes #346\n\t\t\t\t\tif (c.delayInit && isEmptyObject(c.cache)) { buildCache(table); }\n\t\t\t\t\t$table.trigger(\"sortBegin\", this);\n\t\t\t\t\t// sort the table and append it to the dom\n\t\t\t\t\tmultisort(table);\n\t\t\t\t\tappendToTable(table, init);\n\t\t\t\t\t$table.trigger(\"sortEnd\", this);\n\t\t\t\t\tts.applyWidget(table);\n\t\t\t\t\tif ($.isFunction(callback)) {\n\t\t\t\t\t\tcallback(table);\n\t\t\t\t\t}\n\t\t\t\t})\n\t\t\t\t.bind(\"appendCache\" + c.namespace, function(e, callback, init) {\n\t\t\t\t\te.stopPropagation();\n\t\t\t\t\tappendToTable(table, init);\n\t\t\t\t\tif ($.isFunction(callback)) {\n\t\t\t\t\t\tcallback(table);\n\t\t\t\t\t}\n\t\t\t\t})\n\t\t\t\t.bind(\"updateCache\" + c.namespace, function(e, callback){\n\t\t\t\t\t// rebuild parsers\n\t\t\t\t\tif (!(c.parsers && c.parsers.length)) {\n\t\t\t\t\t\tbuildParserCache(table);\n\t\t\t\t\t}\n\t\t\t\t\t// rebuild the cache map\n\t\t\t\t\tbuildCache(table);\n\t\t\t\t\tif ($.isFunction(callback)) {\n\t\t\t\t\t\tcallback(table);\n\t\t\t\t\t}\n\t\t\t\t})\n\t\t\t\t.bind(\"applyWidgetId\" + c.namespace, function(e, id) {\n\t\t\t\t\te.stopPropagation();\n\t\t\t\t\tts.getWidgetById(id).format(table, c, c.widgetOptions);\n\t\t\t\t})\n\t\t\t\t.bind(\"applyWidgets\" + c.namespace, function(e, init) {\n\t\t\t\t\te.stopPropagation();\n\t\t\t\t\t// apply widgets\n\t\t\t\t\tts.applyWidget(table, init);\n\t\t\t\t})\n\t\t\t\t.bind(\"refreshWidgets\" + c.namespace, function(e, all, dontapply){\n\t\t\t\t\te.stopPropagation();\n\t\t\t\t\tts.refreshWidgets(table, all, dontapply);\n\t\t\t\t})\n\t\t\t\t.bind(\"destroy\" + c.namespace, function(e, c, cb){\n\t\t\t\t\te.stopPropagation();\n\t\t\t\t\tts.destroy(table, c, cb);\n\t\t\t\t})\n\t\t\t\t.bind(\"resetToLoadState\" + c.namespace, function(){\n\t\t\t\t\t// remove all widgets\n\t\t\t\t\tts.refreshWidgets(table, true, true);\n\t\t\t\t\t// restore original settings; this clears out current settings, but does 
not clear\n\t\t\t\t\t// values saved to storage.\n\t\t\t\t\tc = $.extend(true, ts.defaults, c.originalSettings);\n\t\t\t\t\ttable.hasInitialized = false;\n\t\t\t\t\t// setup the entire table again\n\t\t\t\t\tts.setup( table, c );\n\t\t\t\t});\n\t\t\t}\n\n\t\t\t/* public methods */\n\t\t\tts.construct = function(settings) {\n\t\t\t\treturn this.each(function() {\n\t\t\t\t\tvar table = this,\n\t\t\t\t\t\t// merge & extend config options\n\t\t\t\t\t\tc = $.extend(true, {}, ts.defaults, settings);\n\t\t\t\t\t\t// save initial settings\n\t\t\t\t\t\tc.originalSettings = settings;\n\t\t\t\t\t// create a table from data (build table widget)\n\t\t\t\t\tif (!table.hasInitialized && ts.buildTable && this.tagName !== 'TABLE') {\n\t\t\t\t\t\t// return the table (in case the original target is the table's container)\n\t\t\t\t\t\tts.buildTable(table, c);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tts.setup(table, c);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t};\n\n\t\t\tts.setup = function(table, c) {\n\t\t\t\t// if no thead or tbody, or tablesorter is already present, quit\n\t\t\t\tif (!table || !table.tHead || table.tBodies.length === 0 || table.hasInitialized === true) {\n\t\t\t\t\treturn c.debug ? log('ERROR: stopping initialization! 
No table, thead, tbody or tablesorter has already been initialized') : '';\n\t\t\t\t}\n\n\t\t\t\tvar k = '',\n\t\t\t\t\t$table = $(table),\n\t\t\t\t\tm = $.metadata;\n\t\t\t\t// initialization flag\n\t\t\t\ttable.hasInitialized = false;\n\t\t\t\t// table is being processed flag\n\t\t\t\ttable.isProcessing = true;\n\t\t\t\t// make sure to store the config object\n\t\t\t\ttable.config = c;\n\t\t\t\t// save the settings where they read\n\t\t\t\t$.data(table, \"tablesorter\", c);\n\t\t\t\tif (c.debug) { $.data( table, 'startoveralltimer', new Date()); }\n\n\t\t\t\t// removing this in version 3 (only supports jQuery 1.7+)\n\t\t\t\tc.supportsDataObject = (function(version) {\n\t\t\t\t\tversion[0] = parseInt(version[0], 10);\n\t\t\t\t\treturn (version[0] > 1) || (version[0] === 1 && parseInt(version[1], 10) >= 4);\n\t\t\t\t})($.fn.jquery.split(\".\"));\n\t\t\t\t// digit sort text location; keeping max+/- for backwards compatibility\n\t\t\t\tc.string = { 'max': 1, 'min': -1, 'emptymin': 1, 'emptymax': -1, 'zero': 0, 'none': 0, 'null': 0, 'top': true, 'bottom': false };\n\t\t\t\t// ensure case insensitivity\n\t\t\t\tc.emptyTo = c.emptyTo.toLowerCase();\n\t\t\t\tc.stringTo = c.stringTo.toLowerCase();\n\t\t\t\t// add table theme class only if there isn't already one there\n\t\t\t\tif (!/tablesorter\\-/.test($table.attr('class'))) {\n\t\t\t\t\tk = (c.theme !== '' ? ' tablesorter-' + c.theme : '');\n\t\t\t\t}\n\t\t\t\tc.table = table;\n\t\t\t\tc.$table = $table\n\t\t\t\t\t.addClass(ts.css.table + ' ' + c.tableClass + k)\n\t\t\t\t\t.attr('role', 'grid');\n\t\t\t\tc.$headers = $table.find(c.selectorHeaders);\n\n\t\t\t\t// give the table a unique id, which will be used in namespace binding\n\t\t\t\tif (!c.namespace) {\n\t\t\t\t\tc.namespace = '.tablesorter' + Math.random().toString(16).slice(2);\n\t\t\t\t} else {\n\t\t\t\t\t// make sure namespace starts with a period & doesn't have weird characters\n\t\t\t\t\tc.namespace = '.' 
+ c.namespace.replace(/\\W/g,'');\n\t\t\t\t}\n\n\t\t\t\tc.$table.children().children('tr').attr('role', 'row');\n\t\t\t\tc.$tbodies = $table.children('tbody:not(.' + c.cssInfoBlock + ')').attr({\n\t\t\t\t\t'aria-live' : 'polite',\n\t\t\t\t\t'aria-relevant' : 'all'\n\t\t\t\t});\n\t\t\t\tif (c.$table.children('caption').length) {\n\t\t\t\t\tk = c.$table.children('caption')[0];\n\t\t\t\t\tif (!k.id) { k.id = c.namespace.slice(1) + 'caption'; }\n\t\t\t\t\tc.$table.attr('aria-labelledby', k.id);\n\t\t\t\t}\n\t\t\t\tc.widgetInit = {}; // keep a list of initialized widgets\n\t\t\t\t// change textExtraction via data-attribute\n\t\t\t\tc.textExtraction = c.$table.attr('data-text-extraction') || c.textExtraction || 'basic';\n\t\t\t\t// build headers\n\t\t\t\tbuildHeaders(table);\n\t\t\t\t// fixate columns if the users supplies the fixedWidth option\n\t\t\t\t// do this after theme has been applied\n\t\t\t\tfixColumnWidth(table);\n\t\t\t\t// try to auto detect column type, and store in tables config\n\t\t\t\tbuildParserCache(table);\n\t\t\t\t// start total row count at zero\n\t\t\t\tc.totalRows = 0;\n\t\t\t\t// build the cache for the tbody cells\n\t\t\t\t// delayInit will delay building the cache until the user starts a sort\n\t\t\t\tif (!c.delayInit) { buildCache(table); }\n\t\t\t\t// bind all header events and methods\n\t\t\t\tts.bindEvents(table, c.$headers, true);\n\t\t\t\tbindMethods(table);\n\t\t\t\t// get sort list from jQuery data or metadata\n\t\t\t\t// in jQuery < 1.4, an error occurs when calling $table.data()\n\t\t\t\tif (c.supportsDataObject && typeof $table.data().sortlist !== 'undefined') {\n\t\t\t\t\tc.sortList = $table.data().sortlist;\n\t\t\t\t} else if (m && ($table.metadata() && $table.metadata().sortlist)) {\n\t\t\t\t\tc.sortList = $table.metadata().sortlist;\n\t\t\t\t}\n\t\t\t\t// apply widget init code\n\t\t\t\tts.applyWidget(table, true);\n\t\t\t\t// if user has supplied a sort list to constructor\n\t\t\t\tif (c.sortList.length > 0) 
{\n\t\t\t\t\t$table.trigger(\"sorton\", [c.sortList, {}, !c.initWidgets, true]);\n\t\t\t\t} else {\n\t\t\t\t\tsetHeadersCss(table);\n\t\t\t\t\tif (c.initWidgets) {\n\t\t\t\t\t\t// apply widget format\n\t\t\t\t\t\tts.applyWidget(table, false);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// show processesing icon\n\t\t\t\tif (c.showProcessing) {\n\t\t\t\t\t$table\n\t\t\t\t\t.unbind('sortBegin' + c.namespace + ' sortEnd' + c.namespace)\n\t\t\t\t\t.bind('sortBegin' + c.namespace + ' sortEnd' + c.namespace, function(e) {\n\t\t\t\t\t\tclearTimeout(c.processTimer);\n\t\t\t\t\t\tts.isProcessing(table);\n\t\t\t\t\t\tif (e.type === 'sortBegin') {\n\t\t\t\t\t\t\tc.processTimer = setTimeout(function(){\n\t\t\t\t\t\t\t\tts.isProcessing(table, true);\n\t\t\t\t\t\t\t}, 500);\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\t\t\t\t}\n\n\t\t\t\t// initialized\n\t\t\t\ttable.hasInitialized = true;\n\t\t\t\ttable.isProcessing = false;\n\t\t\t\tif (c.debug) {\n\t\t\t\t\tts.benchmark(\"Overall initialization time\", $.data( table, 'startoveralltimer'));\n\t\t\t\t}\n\t\t\t\t$table.trigger('tablesorter-initialized', table);\n\t\t\t\tif (typeof c.initialized === 'function') { c.initialized(table); }\n\t\t\t};\n\n\t\t\tts.getColumnData = function(table, obj, indx, getCell){\n\t\t\t\tif (typeof obj === 'undefined' || obj === null) { return; }\n\t\t\t\ttable = $(table)[0];\n\t\t\t\tvar result, $h, k,\n\t\t\t\t\tc = table.config;\n\t\t\t\tif (obj[indx]) {\n\t\t\t\t\treturn getCell ? 
obj[indx] : obj[c.$headers.index( c.$headers.filter('[data-column=\"' + indx + '\"]:last') )];\n\t\t\t\t}\n\t\t\t\tfor (k in obj) {\n\t\t\t\t\tif (typeof k === 'string') {\n\t\t\t\t\t\t$h = c.$headers.filter('[data-column=\"' + indx + '\"]:last')\n\t\t\t\t\t\t\t// header cell with class/id\n\t\t\t\t\t\t\t.filter(k)\n\t\t\t\t\t\t\t// find elements within the header cell with cell/id\n\t\t\t\t\t\t\t.add( c.$headers.filter('[data-column=\"' + indx + '\"]:last').find(k) );\n\t\t\t\t\t\tif ($h.length) {\n\t\t\t\t\t\t\treturn obj[k];\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn result;\n\t\t\t};\n\n\t\t\t// computeTableHeaderCellIndexes from:\n\t\t\t// http://www.javascripttoolbox.com/lib/table/examples.php\n\t\t\t// http://www.javascripttoolbox.com/temp/table_cellindex.html\n\t\t\tts.computeColumnIndex = function(trs) {\n\t\t\t\tvar matrix = [],\n\t\t\t\tlookup = {},\n\t\t\t\tcols = 0, // determine the number of columns\n\t\t\t\ti, j, k, l, $cell, cell, cells, rowIndex, cellId, rowSpan, colSpan, firstAvailCol, matrixrow;\n\t\t\t\tfor (i = 0; i < trs.length; i++) {\n\t\t\t\t\tcells = trs[i].cells;\n\t\t\t\t\tfor (j = 0; j < cells.length; j++) {\n\t\t\t\t\t\tcell = cells[j];\n\t\t\t\t\t\t$cell = $(cell);\n\t\t\t\t\t\trowIndex = cell.parentNode.rowIndex;\n\t\t\t\t\t\tcellId = rowIndex + \"-\" + $cell.index();\n\t\t\t\t\t\trowSpan = cell.rowSpan || 1;\n\t\t\t\t\t\tcolSpan = cell.colSpan || 1;\n\t\t\t\t\t\tif (typeof(matrix[rowIndex]) === \"undefined\") {\n\t\t\t\t\t\t\tmatrix[rowIndex] = [];\n\t\t\t\t\t\t}\n\t\t\t\t\t\t// Find first available column in the first row\n\t\t\t\t\t\tfor (k = 0; k < matrix[rowIndex].length + 1; k++) {\n\t\t\t\t\t\t\tif (typeof(matrix[rowIndex][k]) === \"undefined\") {\n\t\t\t\t\t\t\t\tfirstAvailCol = k;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tlookup[cellId] = firstAvailCol;\n\t\t\t\t\t\tcols = Math.max(firstAvailCol, cols);\n\t\t\t\t\t\t// add data-column\n\t\t\t\t\t\t$cell.attr({ 'data-column' : 
firstAvailCol }); // 'data-row' : rowIndex\n\t\t\t\t\t\tfor (k = rowIndex; k < rowIndex + rowSpan; k++) {\n\t\t\t\t\t\t\tif (typeof(matrix[k]) === \"undefined\") {\n\t\t\t\t\t\t\t\tmatrix[k] = [];\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tmatrixrow = matrix[k];\n\t\t\t\t\t\t\tfor (l = firstAvailCol; l < firstAvailCol + colSpan; l++) {\n\t\t\t\t\t\t\t\tmatrixrow[l] = \"x\";\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// may not be accurate if # header columns !== # tbody columns\n\t\t\t\treturn cols + 1; // add one because it's a zero-based index\n\t\t\t};\n\n\t\t\t// *** Process table ***\n\t\t\t// add processing indicator\n\t\t\tts.isProcessing = function(table, toggle, $ths) {\n\t\t\t\ttable = $(table);\n\t\t\t\tvar c = table[0].config,\n\t\t\t\t\t// default to all headers\n\t\t\t\t\t$h = $ths || table.find('.' + ts.css.header);\n\t\t\t\tif (toggle) {\n\t\t\t\t\t// don't use sortList if custom $ths used\n\t\t\t\t\tif (typeof $ths !== 'undefined' && c.sortList.length > 0) {\n\t\t\t\t\t\t// get headers from the sortList\n\t\t\t\t\t\t$h = $h.filter(function(){\n\t\t\t\t\t\t\t// get data-column from attr to keep  compatibility with jQuery 1.2.6\n\t\t\t\t\t\t\treturn this.sortDisabled ? false : ts.isValueInArray( parseFloat($(this).attr('data-column')), c.sortList) >= 0;\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t\ttable.add($h).addClass(ts.css.processing + ' ' + c.cssProcessing);\n\t\t\t\t} else {\n\t\t\t\t\ttable.add($h).removeClass(ts.css.processing + ' ' + c.cssProcessing);\n\t\t\t\t}\n\t\t\t};\n\n\t\t\t// detach tbody but save the position\n\t\t\t// don't use tbody because there are portions that look for a tbody index (updateCell)\n\t\t\tts.processTbody = function(table, $tb, getIt){\n\t\t\t\ttable = $(table)[0];\n\t\t\t\tvar holdr;\n\t\t\t\tif (getIt) {\n\t\t\t\t\ttable.isProcessing = true;\n\t\t\t\t\t$tb.before('<span class=\"tablesorter-savemyplace\"/>');\n\t\t\t\t\tholdr = ($.fn.detach) ? 
$tb.detach() : $tb.remove();\n\t\t\t\t\treturn holdr;\n\t\t\t\t}\n\t\t\t\tholdr = $(table).find('span.tablesorter-savemyplace');\n\t\t\t\t$tb.insertAfter( holdr );\n\t\t\t\tholdr.remove();\n\t\t\t\ttable.isProcessing = false;\n\t\t\t};\n\n\t\t\tts.clearTableBody = function(table) {\n\t\t\t\t$(table)[0].config.$tbodies.children().detach();\n\t\t\t};\n\n\t\t\tts.bindEvents = function(table, $headers, core){\n\t\t\t\ttable = $(table)[0];\n\t\t\t\tvar downTime,\n\t\t\t\t\tc = table.config;\n\t\t\t\tif (core !== true) {\n\t\t\t\t\tc.$extraHeaders = c.$extraHeaders ? c.$extraHeaders.add($headers) : $headers;\n\t\t\t\t}\n\t\t\t\t// apply event handling to headers and/or additional headers (stickyheaders, scroller, etc)\n\t\t\t\t$headers\n\t\t\t\t// http://stackoverflow.com/questions/5312849/jquery-find-self;\n\t\t\t\t.find(c.selectorSort).add( $headers.filter(c.selectorSort) )\n\t\t\t\t.unbind('mousedown mouseup sort keyup '.split(' ').join(c.namespace + ' '))\n\t\t\t\t.bind('mousedown mouseup sort keyup '.split(' ').join(c.namespace + ' '), function(e, external) {\n\t\t\t\t\tvar cell, type = e.type;\n\t\t\t\t\t// only recognize left clicks or enter\n\t\t\t\t\tif ( ((e.which || e.button) !== 1 && !/sort|keyup/.test(type)) || (type === 'keyup' && e.which !== 13) ) {\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t\t// ignore long clicks (prevents resizable widget from initializing a sort)\n\t\t\t\t\tif (type === 'mouseup' && external !== true && (new Date().getTime() - downTime > 250)) { return; }\n\t\t\t\t\t// set timer on mousedown\n\t\t\t\t\tif (type === 'mousedown') {\n\t\t\t\t\t\tdownTime = new Date().getTime();\n\t\t\t\t\t\treturn /(input|select|button|textarea)/i.test(e.target.tagName) ||\n\t\t\t\t\t\t\t// allow clicks to contents of selected cells\n\t\t\t\t\t\t\t$(e.target).closest('td,th').hasClass(c.cssAllowClicks) ? 
'' : !c.cancelSelection;\n\t\t\t\t\t}\n\t\t\t\t\tif (c.delayInit && isEmptyObject(c.cache)) { buildCache(table); }\n\t\t\t\t\t// jQuery v1.2.6 doesn't have closest()\n\t\t\t\t\tcell = $.fn.closest ? $(this).closest('th, td')[0] : /TH|TD/.test(this.tagName) ? this : $(this).parents('th, td')[0];\n\t\t\t\t\t// reference original table headers and find the same cell\n\t\t\t\t\tcell = c.$headers[ $headers.index( cell ) ];\n\t\t\t\t\tif (!cell.sortDisabled) {\n\t\t\t\t\t\tinitSort(table, cell, e);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tif (c.cancelSelection) {\n\t\t\t\t\t// cancel selection\n\t\t\t\t\t$headers\n\t\t\t\t\t\t.attr('unselectable', 'on')\n\t\t\t\t\t\t.bind('selectstart', false)\n\t\t\t\t\t\t.css({\n\t\t\t\t\t\t\t'user-select': 'none',\n\t\t\t\t\t\t\t'MozUserSelect': 'none' // not needed for jQuery 1.8+\n\t\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t};\n\n\t\t\t// restore headers\n\t\t\tts.restoreHeaders = function(table){\n\t\t\t\tvar c = $(table)[0].config;\n\t\t\t\t// don't use c.$headers here in case header cells were swapped\n\t\t\t\tc.$table.find(c.selectorHeaders).each(function(i){\n\t\t\t\t\t// only restore header cells if it is wrapped\n\t\t\t\t\t// because this is also used by the updateAll method\n\t\t\t\t\tif ($(this).find('.' + ts.css.headerIn).length){\n\t\t\t\t\t\t$(this).html( c.headerContent[i] );\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t};\n\n\t\t\tts.destroy = function(table, removeClasses, callback){\n\t\t\t\ttable = $(table)[0];\n\t\t\t\tif (!table.hasInitialized) { return; }\n\t\t\t\t// remove all widgets\n\t\t\t\tts.refreshWidgets(table, true, true);\n\t\t\t\tvar $t = $(table), c = table.config,\n\t\t\t\t$h = $t.find('thead:first'),\n\t\t\t\t$r = $h.find('tr.' 
+ ts.css.headerRow).removeClass(ts.css.headerRow + ' ' + c.cssHeaderRow),\n\t\t\t\t$f = $t.find('tfoot:first > tr').children('th, td');\n\t\t\t\tif (removeClasses === false && $.inArray('uitheme', c.widgets) >= 0) {\n\t\t\t\t\t// reapply uitheme classes, in case we want to maintain appearance\n\t\t\t\t\t$t.trigger('applyWidgetId', ['uitheme']);\n\t\t\t\t\t$t.trigger('applyWidgetId', ['zebra']);\n\t\t\t\t}\n\t\t\t\t// remove widget added rows, just in case\n\t\t\t\t$h.find('tr').not($r).remove();\n\t\t\t\t// disable tablesorter\n\t\t\t\t$t\n\t\t\t\t\t.removeData('tablesorter')\n\t\t\t\t\t.unbind('sortReset update updateAll updateRows updateCell addRows updateComplete sorton appendCache updateCache applyWidgetId applyWidgets refreshWidgets destroy mouseup mouseleave keypress sortBegin sortEnd resetToLoadState '.split(' ').join(c.namespace + ' '));\n\t\t\t\tc.$headers.add($f)\n\t\t\t\t\t.removeClass( [ts.css.header, c.cssHeader, c.cssAsc, c.cssDesc, ts.css.sortAsc, ts.css.sortDesc, ts.css.sortNone].join(' ') )\n\t\t\t\t\t.removeAttr('data-column')\n\t\t\t\t\t.removeAttr('aria-label')\n\t\t\t\t\t.attr('aria-disabled', 'true');\n\t\t\t\t$r.find(c.selectorSort).unbind('mousedown mouseup keypress '.split(' ').join(c.namespace + ' '));\n\t\t\t\tts.restoreHeaders(table);\n\t\t\t\t$t.toggleClass(ts.css.table + ' ' + c.tableClass + ' tablesorter-' + c.theme, removeClasses === false);\n\t\t\t\t// clear flag in case the plugin is initialized again\n\t\t\t\ttable.hasInitialized = false;\n\t\t\t\tdelete table.config.cache;\n\t\t\t\tif (typeof callback === 'function') {\n\t\t\t\t\tcallback(table);\n\t\t\t\t}\n\t\t\t};\n\n\t\t\t// *** sort functions ***\n\t\t\t// regex used in natural sort\n\t\t\tts.regex = {\n\t\t\t\tchunk : /(^([+\\-]?(?:0|[1-9]\\d*)(?:\\.\\d*)?(?:[eE][+\\-]?\\d+)?)?$|^0x[0-9a-f]+$|\\d+)/gi, // chunk/tokenize numbers & letters\n\t\t\t\tchunks: /(^\\\\0|\\\\0$)/, // replace chunks @ ends\n\t\t\t\thex: /^0x[0-9a-f]+$/i // hex\n\t\t\t};\n\n\t\t\t// Natural sort - 
https://github.com/overset/javascript-natural-sort (date sorting removed)\n\t\t\t// this function will only accept strings, or you'll see \"TypeError: undefined is not a function\"\n\t\t\t// I could add a = a.toString(); b = b.toString(); but it'll slow down the sort overall\n\t\t\tts.sortNatural = function(a, b) {\n\t\t\t\tif (a === b) { return 0; }\n\t\t\t\tvar xN, xD, yN, yD, xF, yF, i, mx,\n\t\t\t\t\tr = ts.regex;\n\t\t\t\t// first try and sort Hex codes\n\t\t\t\tif (r.hex.test(b)) {\n\t\t\t\t\txD = parseInt(a.match(r.hex), 16);\n\t\t\t\t\tyD = parseInt(b.match(r.hex), 16);\n\t\t\t\t\tif ( xD < yD ) { return -1; }\n\t\t\t\t\tif ( xD > yD ) { return 1; }\n\t\t\t\t}\n\t\t\t\t// chunk/tokenize\n\t\t\t\txN = a.replace(r.chunk, '\\\\0$1\\\\0').replace(r.chunks, '').split('\\\\0');\n\t\t\t\tyN = b.replace(r.chunk, '\\\\0$1\\\\0').replace(r.chunks, '').split('\\\\0');\n\t\t\t\tmx = Math.max(xN.length, yN.length);\n\t\t\t\t// natural sorting through split numeric strings and default strings\n\t\t\t\tfor (i = 0; i < mx; i++) {\n\t\t\t\t\t// find floats not starting with '0', string or 0 if not defined\n\t\t\t\t\txF = isNaN(xN[i]) ? xN[i] || 0 : parseFloat(xN[i]) || 0;\n\t\t\t\t\tyF = isNaN(yN[i]) ? yN[i] || 0 : parseFloat(yN[i]) || 0;\n\t\t\t\t\t// handle numeric vs string comparison - number < string - (Kyle Adams)\n\t\t\t\t\tif (isNaN(xF) !== isNaN(yF)) { return (isNaN(xF)) ? 1 : -1; }\n\t\t\t\t\t// rely on string comparison if different types - i.e. '02' < 2 != '02' < '2'\n\t\t\t\t\tif (typeof xF !== typeof yF) {\n\t\t\t\t\t\txF += '';\n\t\t\t\t\t\tyF += '';\n\t\t\t\t\t}\n\t\t\t\t\tif (xF < yF) { return -1; }\n\t\t\t\t\tif (xF > yF) { return 1; }\n\t\t\t\t}\n\t\t\t\treturn 0;\n\t\t\t};\n\n\t\t\tts.sortNaturalAsc = function(a, b, col, table, c) {\n\t\t\t\tif (a === b) { return 0; }\n\t\t\t\tvar e = c.string[ (c.empties[col] || c.emptyTo ) ];\n\t\t\t\tif (a === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 
-1 : 1) : -e || -1; }\n\t\t\t\tif (b === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 1 : -1) : e || 1; }\n\t\t\t\treturn ts.sortNatural(a, b);\n\t\t\t};\n\n\t\t\tts.sortNaturalDesc = function(a, b, col, table, c) {\n\t\t\t\tif (a === b) { return 0; }\n\t\t\t\tvar e = c.string[ (c.empties[col] || c.emptyTo ) ];\n\t\t\t\tif (a === '' && e !== 0) { return typeof e === 'boolean' ? (e ? -1 : 1) : e || 1; }\n\t\t\t\tif (b === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 1 : -1) : -e || -1; }\n\t\t\t\treturn ts.sortNatural(b, a);\n\t\t\t};\n\n\t\t\t// basic alphabetical sort\n\t\t\tts.sortText = function(a, b) {\n\t\t\t\treturn a > b ? 1 : (a < b ? -1 : 0);\n\t\t\t};\n\n\t\t\t// return text string value by adding up ascii value\n\t\t\t// so the text is somewhat sorted when using a digital sort\n\t\t\t// this is NOT an alphanumeric sort\n\t\t\tts.getTextValue = function(a, num, mx) {\n\t\t\t\tif (mx) {\n\t\t\t\t\t// make sure the text value is greater than the max numerical value (mx)\n\t\t\t\t\tvar i, l = a ? a.length : 0, n = mx + num;\n\t\t\t\t\tfor (i = 0; i < l; i++) {\n\t\t\t\t\t\tn += a.charCodeAt(i);\n\t\t\t\t\t}\n\t\t\t\t\treturn num * n;\n\t\t\t\t}\n\t\t\t\treturn 0;\n\t\t\t};\n\n\t\t\tts.sortNumericAsc = function(a, b, num, mx, col, table) {\n\t\t\t\tif (a === b) { return 0; }\n\t\t\t\tvar c = table.config,\n\t\t\t\t\te = c.string[ (c.empties[col] || c.emptyTo ) ];\n\t\t\t\tif (a === '' && e !== 0) { return typeof e === 'boolean' ? (e ? -1 : 1) : -e || -1; }\n\t\t\t\tif (b === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 
1 : -1) : e || 1; }\n\t\t\t\tif (isNaN(a)) { a = ts.getTextValue(a, num, mx); }\n\t\t\t\tif (isNaN(b)) { b = ts.getTextValue(b, num, mx); }\n\t\t\t\treturn a - b;\n\t\t\t};\n\n\t\t\tts.sortNumericDesc = function(a, b, num, mx, col, table) {\n\t\t\t\tif (a === b) { return 0; }\n\t\t\t\tvar c = table.config,\n\t\t\t\t\te = c.string[ (c.empties[col] || c.emptyTo ) ];\n\t\t\t\tif (a === '' && e !== 0) { return typeof e === 'boolean' ? (e ? -1 : 1) : e || 1; }\n\t\t\t\tif (b === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 1 : -1) : -e || -1; }\n\t\t\t\tif (isNaN(a)) { a = ts.getTextValue(a, num, mx); }\n\t\t\t\tif (isNaN(b)) { b = ts.getTextValue(b, num, mx); }\n\t\t\t\treturn b - a;\n\t\t\t};\n\n\t\t\tts.sortNumeric = function(a, b) {\n\t\t\t\treturn a - b;\n\t\t\t};\n\n\t\t\t// used when replacing accented characters during sorting\n\t\t\tts.characterEquivalents = {\n\t\t\t\t\"a\" : \"\\u00e1\\u00e0\\u00e2\\u00e3\\u00e4\\u0105\\u00e5\", // áàâãäąå\n\t\t\t\t\"A\" : \"\\u00c1\\u00c0\\u00c2\\u00c3\\u00c4\\u0104\\u00c5\", // ÁÀÂÃÄĄÅ\n\t\t\t\t\"c\" : \"\\u00e7\\u0107\\u010d\", // çćč\n\t\t\t\t\"C\" : \"\\u00c7\\u0106\\u010c\", // ÇĆČ\n\t\t\t\t\"e\" : \"\\u00e9\\u00e8\\u00ea\\u00eb\\u011b\\u0119\", // éèêëěę\n\t\t\t\t\"E\" : \"\\u00c9\\u00c8\\u00ca\\u00cb\\u011a\\u0118\", // ÉÈÊËĚĘ\n\t\t\t\t\"i\" : \"\\u00ed\\u00ec\\u0130\\u00ee\\u00ef\\u0131\", // íìİîïı\n\t\t\t\t\"I\" : \"\\u00cd\\u00cc\\u0130\\u00ce\\u00cf\", // ÍÌİÎÏ\n\t\t\t\t\"o\" : \"\\u00f3\\u00f2\\u00f4\\u00f5\\u00f6\", // óòôõö\n\t\t\t\t\"O\" : \"\\u00d3\\u00d2\\u00d4\\u00d5\\u00d6\", // ÓÒÔÕÖ\n\t\t\t\t\"ss\": \"\\u00df\", // ß (s sharp)\n\t\t\t\t\"SS\": \"\\u1e9e\", // ẞ (Capital sharp s)\n\t\t\t\t\"u\" : \"\\u00fa\\u00f9\\u00fb\\u00fc\\u016f\", // úùûüů\n\t\t\t\t\"U\" : \"\\u00da\\u00d9\\u00db\\u00dc\\u016e\" // ÚÙÛÜŮ\n\t\t\t};\n\t\t\tts.replaceAccents = function(s) {\n\t\t\t\tvar a, acc = '[', eq = ts.characterEquivalents;\n\t\t\t\tif (!ts.characterRegex) {\n\t\t\t\t\tts.characterRegexArray = 
{};\n\t\t\t\t\tfor (a in eq) {\n\t\t\t\t\t\tif (typeof a === 'string') {\n\t\t\t\t\t\t\tacc += eq[a];\n\t\t\t\t\t\t\tts.characterRegexArray[a] = new RegExp('[' + eq[a] + ']', 'g');\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tts.characterRegex = new RegExp(acc + ']');\n\t\t\t\t}\n\t\t\t\tif (ts.characterRegex.test(s)) {\n\t\t\t\t\tfor (a in eq) {\n\t\t\t\t\t\tif (typeof a === 'string') {\n\t\t\t\t\t\t\ts = s.replace( ts.characterRegexArray[a], a );\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn s;\n\t\t\t};\n\n\t\t\t// *** utilities ***\n\t\t\tts.isValueInArray = function(column, arry) {\n\t\t\t\tvar indx, len = arry.length;\n\t\t\t\tfor (indx = 0; indx < len; indx++) {\n\t\t\t\t\tif (arry[indx][0] === column) {\n\t\t\t\t\t\treturn indx;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn -1;\n\t\t\t};\n\n\t\t\tts.addParser = function(parser) {\n\t\t\t\tvar i, l = ts.parsers.length, a = true;\n\t\t\t\tfor (i = 0; i < l; i++) {\n\t\t\t\t\tif (ts.parsers[i].id.toLowerCase() === parser.id.toLowerCase()) {\n\t\t\t\t\t\ta = false;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (a) {\n\t\t\t\t\tts.parsers.push(parser);\n\t\t\t\t}\n\t\t\t};\n\n\t\t\tts.getParserById = function(name) {\n\t\t\t\t/*jshint eqeqeq:false */\n\t\t\t\tif (name == 'false') { return false; }\n\t\t\t\tvar i, l = ts.parsers.length;\n\t\t\t\tfor (i = 0; i < l; i++) {\n\t\t\t\t\tif (ts.parsers[i].id.toLowerCase() === (name.toString()).toLowerCase()) {\n\t\t\t\t\t\treturn ts.parsers[i];\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn false;\n\t\t\t};\n\n\t\t\tts.addWidget = function(widget) {\n\t\t\t\tts.widgets.push(widget);\n\t\t\t};\n\n\t\t\tts.hasWidget = function(table, name){\n\t\t\t\ttable = $(table);\n\t\t\t\treturn table.length && table[0].config && table[0].config.widgetInit[name] || false;\n\t\t\t};\n\n\t\t\tts.getWidgetById = function(name) {\n\t\t\t\tvar i, w, l = ts.widgets.length;\n\t\t\t\tfor (i = 0; i < l; i++) {\n\t\t\t\t\tw = ts.widgets[i];\n\t\t\t\t\tif (w && w.hasOwnProperty('id') && w.id.toLowerCase() === 
name.toLowerCase()) {\n\t\t\t\t\t\treturn w;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t};\n\n\t\t\tts.applyWidget = function(table, init) {\n\t\t\t\ttable = $(table)[0]; // in case this is called externally\n\t\t\t\tvar c = table.config,\n\t\t\t\t\two = c.widgetOptions,\n\t\t\t\t\ttableClass = ' ' + c.table.className + ' ',\n\t\t\t\t\twidgets = [],\n\t\t\t\t\ttime, time2, w, wd;\n\t\t\t\t// prevent numerous consecutive widget applications\n\t\t\t\tif (init !== false && table.hasInitialized && (table.isApplyingWidgets || table.isUpdating)) { return; }\n\t\t\t\tif (c.debug) { time = new Date(); }\n\t\t\t\t// look for widgets to apply from in table class\n\t\t\t\t// stop using \\b otherwise this matches \"ui-widget-content\" & adds \"content\" widget\n\t\t\t\twd = new RegExp( '\\\\s' + c.widgetClass.replace( /\\{name\\}/i, '([\\\\w-]+)' )+ '\\\\s', 'g' );\n\t\t\t\tif ( tableClass.match( wd ) ) {\n\t\t\t\t\t// extract out the widget id from the table class (widget id's can include dashes)\n\t\t\t\t\tw = tableClass.match( wd );\n\t\t\t\t\tif ( w ) {\n\t\t\t\t\t\t$.each( w, function( i,n ){\n\t\t\t\t\t\t\tc.widgets.push( n.replace( wd, '$1' ) );\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (c.widgets.length) {\n\t\t\t\t\ttable.isApplyingWidgets = true;\n\t\t\t\t\t// ensure unique widget ids\n\t\t\t\t\tc.widgets = $.grep(c.widgets, function(v, k){\n\t\t\t\t\t\treturn $.inArray(v, c.widgets) === k;\n\t\t\t\t\t});\n\t\t\t\t\t// build widget array & add priority as needed\n\t\t\t\t\t$.each(c.widgets || [], function(i,n){\n\t\t\t\t\t\twd = ts.getWidgetById(n);\n\t\t\t\t\t\tif (wd && wd.id) {\n\t\t\t\t\t\t\t// set priority to 10 if not defined\n\t\t\t\t\t\t\tif (!wd.priority) { wd.priority = 10; }\n\t\t\t\t\t\t\twidgets[i] = wd;\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\t\t\t\t\t// sort widgets by priority\n\t\t\t\t\twidgets.sort(function(a, b){\n\t\t\t\t\t\treturn a.priority < b.priority ? -1 : a.priority === b.priority ? 
0 : 1;\n\t\t\t\t\t});\n\t\t\t\t\t// add/update selected widgets\n\t\t\t\t\t$.each(widgets, function(i,w){\n\t\t\t\t\t\tif (w) {\n\t\t\t\t\t\t\tif (init || !(c.widgetInit[w.id])) {\n\t\t\t\t\t\t\t\t// set init flag first to prevent calling init more than once (e.g. pager)\n\t\t\t\t\t\t\t\tc.widgetInit[w.id] = true;\n\t\t\t\t\t\t\t\tif (w.hasOwnProperty('options')) {\n\t\t\t\t\t\t\t\t\two = table.config.widgetOptions = $.extend( true, {}, w.options, wo );\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tif (w.hasOwnProperty('init')) {\n\t\t\t\t\t\t\t\t\tif (c.debug) { time2 = new Date(); }\n\t\t\t\t\t\t\t\t\tw.init(table, w, c, wo);\n\t\t\t\t\t\t\t\t\tif (c.debug) { ts.benchmark('Initializing ' + w.id + ' widget', time2); }\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tif (!init && w.hasOwnProperty('format')) {\n\t\t\t\t\t\t\t\tif (c.debug) { time2 = new Date(); }\n\t\t\t\t\t\t\t\tw.format(table, c, wo, false);\n\t\t\t\t\t\t\t\tif (c.debug) { ts.benchmark( ( init ? 'Initializing ' : 'Applying ' ) + w.id + ' widget', time2); }\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tsetTimeout(function(){\n\t\t\t\t\ttable.isApplyingWidgets = false;\n\t\t\t\t\t$.data(table, 'lastWidgetApplication', new Date());\n\t\t\t\t}, 0);\n\t\t\t\tif (c.debug) {\n\t\t\t\t\tw = c.widgets.length;\n\t\t\t\t\tbenchmark(\"Completed \" + (init === true ? \"initializing \" : \"applying \") + w + \" widget\" + (w !== 1 ? 
\"s\" : \"\"), time);\n\t\t\t\t}\n\t\t\t};\n\n\t\t\tts.refreshWidgets = function(table, doAll, dontapply) {\n\t\t\t\ttable = $(table)[0]; // see issue #243\n\t\t\t\tvar i, c = table.config,\n\t\t\t\t\tcw = c.widgets,\n\t\t\t\t\tw = ts.widgets, l = w.length;\n\t\t\t\t// remove previous widgets\n\t\t\t\tfor (i = 0; i < l; i++){\n\t\t\t\t\tif ( w[i] && w[i].id && (doAll || $.inArray( w[i].id, cw ) < 0) ) {\n\t\t\t\t\t\tif (c.debug) { log( 'Refeshing widgets: Removing \"' + w[i].id + '\"' ); }\n\t\t\t\t\t\t// only remove widgets that have been initialized - fixes #442\n\t\t\t\t\t\tif (w[i].hasOwnProperty('remove') && c.widgetInit[w[i].id]) {\n\t\t\t\t\t\t\tw[i].remove(table, c, c.widgetOptions);\n\t\t\t\t\t\t\tc.widgetInit[w[i].id] = false;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (dontapply !== true) {\n\t\t\t\t\tts.applyWidget(table, doAll);\n\t\t\t\t}\n\t\t\t};\n\n\t\t\t// get sorter, string, empty, etc options for each column from\n\t\t\t// jQuery data, metadata, header option or header class name (\"sorter-false\")\n\t\t\t// priority = jQuery data > meta > headers option > header class name\n\t\t\tts.getData = function(h, ch, key) {\n\t\t\t\tvar val = '', $h = $(h), m, cl;\n\t\t\t\tif (!$h.length) { return ''; }\n\t\t\t\tm = $.metadata ? 
$h.metadata() : false;\n\t\t\t\tcl = ' ' + ($h.attr('class') || '');\n\t\t\t\tif (typeof $h.data(key) !== 'undefined' || typeof $h.data(key.toLowerCase()) !== 'undefined'){\n\t\t\t\t\t// \"data-lockedOrder\" is assigned to \"lockedorder\"; but \"data-locked-order\" is assigned to \"lockedOrder\"\n\t\t\t\t\t// \"data-sort-initial-order\" is assigned to \"sortInitialOrder\"\n\t\t\t\t\tval += $h.data(key) || $h.data(key.toLowerCase());\n\t\t\t\t} else if (m && typeof m[key] !== 'undefined') {\n\t\t\t\t\tval += m[key];\n\t\t\t\t} else if (ch && typeof ch[key] !== 'undefined') {\n\t\t\t\t\tval += ch[key];\n\t\t\t\t} else if (cl !== ' ' && cl.match(' ' + key + '-')) {\n\t\t\t\t\t// include sorter class name \"sorter-text\", etc; now works with \"sorter-my-custom-parser\"\n\t\t\t\t\tval = cl.match( new RegExp('\\\\s' + key + '-([\\\\w-]+)') )[1] || '';\n\t\t\t\t}\n\t\t\t\treturn $.trim(val);\n\t\t\t};\n\n\t\t\tts.formatFloat = function(s, table) {\n\t\t\t\tif (typeof s !== 'string' || s === '') { return s; }\n\t\t\t\t// allow using formatFloat without a table; defaults to US number format\n\t\t\t\tvar i,\n\t\t\t\t\tt = table && table.config ? table.config.usNumberFormat !== false :\n\t\t\t\t\t\ttypeof table !== \"undefined\" ? table : true;\n\t\t\t\tif (t) {\n\t\t\t\t\t// US Format - 1,234,567.89 -> 1234567.89\n\t\t\t\t\ts = s.replace(/,/g,'');\n\t\t\t\t} else {\n\t\t\t\t\t// German Format = 1.234.567,89 -> 1234567.89\n\t\t\t\t\t// French Format = 1 234 567,89 -> 1234567.89\n\t\t\t\t\ts = s.replace(/[\\s|\\.]/g,'').replace(/,/g,'.');\n\t\t\t\t}\n\t\t\t\tif(/^\\s*\\([.\\d]+\\)/.test(s)) {\n\t\t\t\t\t// make (#) into a negative number -> (10) = -10\n\t\t\t\t\ts = s.replace(/^\\s*\\(([.\\d]+)\\)/, '-$1');\n\t\t\t\t}\n\t\t\t\ti = parseFloat(s);\n\t\t\t\t// return the text instead of zero\n\t\t\t\treturn isNaN(i) ? $.trim(s) : i;\n\t\t\t};\n\n\t\t\tts.isDigit = function(s) {\n\t\t\t\t// replace all unwanted chars and match\n\t\t\t\treturn isNaN(s) ? 
(/^[\\-+(]?\\d+[)]?$/).test(s.toString().replace(/[,.'\"\\s]/g, '')) : true;\n\t\t\t};\n\n\t\t}()\n\t});\n\n\t// make shortcut\n\tvar ts = $.tablesorter;\n\n\t// extend plugin scope\n\t$.fn.extend({\n\t\ttablesorter: ts.construct\n\t});\n\n\t// add default parsers\n\tts.addParser({\n\t\tid: 'no-parser',\n\t\tis: function() {\n\t\t\treturn false;\n\t\t},\n\t\tformat: function() {\n\t\t\treturn '';\n\t\t},\n\t\ttype: 'text'\n\t});\n\n\tts.addParser({\n\t\tid: \"text\",\n\t\tis: function() {\n\t\t\treturn true;\n\t\t},\n\t\tformat: function(s, table) {\n\t\t\tvar c = table.config;\n\t\t\tif (s) {\n\t\t\t\ts = $.trim( c.ignoreCase ? s.toLocaleLowerCase() : s );\n\t\t\t\ts = c.sortLocaleCompare ? ts.replaceAccents(s) : s;\n\t\t\t}\n\t\t\treturn s;\n\t\t},\n\t\ttype: \"text\"\n\t});\n\n\tts.addParser({\n\t\tid: \"digit\",\n\t\tis: function(s) {\n\t\t\treturn ts.isDigit(s);\n\t\t},\n\t\tformat: function(s, table) {\n\t\t\tvar n = ts.formatFloat((s || '').replace(/[^\\w,. \\-()]/g, \"\"), table);\n\t\t\treturn s && typeof n === 'number' ? n : s ? $.trim( s && table.config.ignoreCase ? s.toLocaleLowerCase() : s ) : s;\n\t\t},\n\t\ttype: \"numeric\"\n\t});\n\n\tts.addParser({\n\t\tid: \"currency\",\n\t\tis: function(s) {\n\t\t\treturn (/^\\(?\\d+[\\u00a3$\\u20ac\\u00a4\\u00a5\\u00a2?.]|[\\u00a3$\\u20ac\\u00a4\\u00a5\\u00a2?.]\\d+\\)?$/).test((s || '').replace(/[+\\-,. ]/g,'')); // £$€¤¥¢\n\t\t},\n\t\tformat: function(s, table) {\n\t\t\tvar n = ts.formatFloat((s || '').replace(/[^\\w,. \\-()]/g, \"\"), table);\n\t\t\treturn s && typeof n === 'number' ? n : s ? $.trim( s && table.config.ignoreCase ? s.toLocaleLowerCase() : s ) : s;\n\t\t},\n\t\ttype: \"numeric\"\n\t});\n\n\tts.addParser({\n\t\tid: \"url\",\n\t\tis: function(s) {\n\t\t\treturn (/^(https?|ftp|file):\\/\\//).test(s);\n\t\t},\n\t\tformat: function(s) {\n\t\t\treturn s ? 
$.trim(s.replace(/(https?|ftp|file):\\/\\//, '')) : s;\n\t\t},\n\t\tparsed : true, // filter widget flag\n\t\ttype: \"text\"\n\t});\n\n\tts.addParser({\n\t\tid: \"isoDate\",\n\t\tis: function(s) {\n\t\t\treturn (/^\\d{4}[\\/\\-]\\d{1,2}[\\/\\-]\\d{1,2}/).test(s);\n\t\t},\n\t\tformat: function(s, table) {\n\t\t\tvar date = s ? new Date( s.replace(/-/g, \"/\") ) : s;\n\t\t\treturn date instanceof Date && isFinite(date) ? date.getTime() : s;\n\t\t},\n\t\ttype: \"numeric\"\n\t});\n\n\tts.addParser({\n\t\tid: \"percent\",\n\t\tis: function(s) {\n\t\t\treturn (/(\\d\\s*?%|%\\s*?\\d)/).test(s) && s.length < 15;\n\t\t},\n\t\tformat: function(s, table) {\n\t\t\treturn s ? ts.formatFloat(s.replace(/%/g, \"\"), table) : s;\n\t\t},\n\t\ttype: \"numeric\"\n\t});\n\n\t// added image parser to core v2.17.9\n\tts.addParser({\n\t\tid: \"image\",\n\t\tis: function(s, table, node, $node){\n\t\t\treturn $node.find('img').length > 0;\n\t\t},\n\t\tformat: function(s, table, cell) {\n\t\t\treturn $(cell).find('img').attr(table.config.imgAttr || 'alt') || s;\n\t\t},\n\t\tparsed : true, // filter widget flag\n\t\ttype: \"text\"\n\t});\n\n\tts.addParser({\n\t\tid: \"usLongDate\",\n\t\tis: function(s) {\n\t\t\t// two digit years are not allowed cross-browser\n\t\t\t// Jan 01, 2013 12:34:56 PM or 01 Jan 2013\n\t\t\treturn (/^[A-Z]{3,10}\\.?\\s+\\d{1,2},?\\s+(\\d{4})(\\s+\\d{1,2}:\\d{2}(:\\d{2})?(\\s+[AP]M)?)?$/i).test(s) || (/^\\d{1,2}\\s+[A-Z]{3,10}\\s+\\d{4}/i).test(s);\n\t\t},\n\t\tformat: function(s, table) {\n\t\t\tvar date = s ? new Date( s.replace(/(\\S)([AP]M)$/i, \"$1 $2\") ) : s;\n\t\t\treturn date instanceof Date && isFinite(date) ? 
date.getTime() : s;\n\t\t},\n\t\ttype: \"numeric\"\n\t});\n\n\tts.addParser({\n\t\tid: \"shortDate\", // \"mmddyyyy\", \"ddmmyyyy\" or \"yyyymmdd\"\n\t\tis: function(s) {\n\t\t\t// testing for ##-##-#### or ####-##-##, so it's not perfect; time can be included\n\t\t\treturn (/(^\\d{1,2}[\\/\\s]\\d{1,2}[\\/\\s]\\d{4})|(^\\d{4}[\\/\\s]\\d{1,2}[\\/\\s]\\d{1,2})/).test((s || '').replace(/\\s+/g,\" \").replace(/[\\-.,]/g, \"/\"));\n\t\t},\n\t\tformat: function(s, table, cell, cellIndex) {\n\t\t\tif (s) {\n\t\t\t\tvar date, d,\n\t\t\t\t\tc = table.config,\n\t\t\t\t\tci = c.$headers.filter('[data-column=' + cellIndex + ']:last'),\n\t\t\t\t\tformat = ci.length && ci[0].dateFormat || ts.getData( ci, ts.getColumnData( table, c.headers, cellIndex ), 'dateFormat') || c.dateFormat;\n\t\t\t\td = s.replace(/\\s+/g,\" \").replace(/[\\-.,]/g, \"/\"); // escaped - because JSHint in Firefox was showing it as an error\n\t\t\t\tif (format === \"mmddyyyy\") {\n\t\t\t\t\td = d.replace(/(\\d{1,2})[\\/\\s](\\d{1,2})[\\/\\s](\\d{4})/, \"$3/$1/$2\");\n\t\t\t\t} else if (format === \"ddmmyyyy\") {\n\t\t\t\t\td = d.replace(/(\\d{1,2})[\\/\\s](\\d{1,2})[\\/\\s](\\d{4})/, \"$3/$2/$1\");\n\t\t\t\t} else if (format === \"yyyymmdd\") {\n\t\t\t\t\td = d.replace(/(\\d{4})[\\/\\s](\\d{1,2})[\\/\\s](\\d{1,2})/, \"$1/$2/$3\");\n\t\t\t\t}\n\t\t\t\tdate = new Date(d);\n\t\t\t\treturn date instanceof Date && isFinite(date) ? date.getTime() : s;\n\t\t\t}\n\t\t\treturn s;\n\t\t},\n\t\ttype: \"numeric\"\n\t});\n\n\tts.addParser({\n\t\tid: \"time\",\n\t\tis: function(s) {\n\t\t\treturn (/^(([0-2]?\\d:[0-5]\\d)|([0-1]?\\d:[0-5]\\d\\s?([AP]M)))$/i).test(s);\n\t\t},\n\t\tformat: function(s, table) {\n\t\t\tvar date = s ? new Date( \"2000/01/01 \" + s.replace(/(\\S)([AP]M)$/i, \"$1 $2\") ) : s;\n\t\t\treturn date instanceof Date && isFinite(date) ? 
date.getTime() : s;\n\t\t},\n\t\ttype: \"numeric\"\n\t});\n\n\tts.addParser({\n\t\tid: \"metadata\",\n\t\tis: function() {\n\t\t\treturn false;\n\t\t},\n\t\tformat: function(s, table, cell) {\n\t\t\tvar c = table.config,\n\t\t\tp = (!c.parserMetadataName) ? 'sortValue' : c.parserMetadataName;\n\t\t\treturn $(cell).metadata()[p];\n\t\t},\n\t\ttype: \"numeric\"\n\t});\n\n\t// add default widgets\n\tts.addWidget({\n\t\tid: \"zebra\",\n\t\tpriority: 90,\n\t\tformat: function(table, c, wo) {\n\t\t\tvar $tb, $tv, $tr, row, even, time, k,\n\t\t\tchild = new RegExp(c.cssChildRow, 'i'),\n\t\t\tb = c.$tbodies;\n\t\t\tif (c.debug) {\n\t\t\t\ttime = new Date();\n\t\t\t}\n\t\t\tfor (k = 0; k < b.length; k++ ) {\n\t\t\t\t// loop through the visible rows\n\t\t\t\trow = 0;\n\t\t\t\t$tb = b.eq(k);\n\t\t\t\t$tv = $tb.children('tr:visible').not(c.selectorRemove);\n\t\t\t\t// revered back to using jQuery each - strangely it's the fastest method\n\t\t\t\t/*jshint loopfunc:true */\n\t\t\t\t$tv.each(function(){\n\t\t\t\t\t$tr = $(this);\n\t\t\t\t\t// style child rows the same way the parent row was styled\n\t\t\t\t\tif (!child.test(this.className)) { row++; }\n\t\t\t\t\teven = (row % 2 === 0);\n\t\t\t\t\t$tr.removeClass(wo.zebra[even ? 1 : 0]).addClass(wo.zebra[even ? 0 : 1]);\n\t\t\t\t});\n\t\t\t}\n\t\t},\n\t\tremove: function(table, c, wo){\n\t\t\tvar k, $tb,\n\t\t\t\tb = c.$tbodies,\n\t\t\t\trmv = (wo.zebra || [ \"even\", \"odd\" ]).join(' ');\n\t\t\tfor (k = 0; k < b.length; k++ ){\n\t\t\t\t$tb = ts.processTbody(table, b.eq(k), true); // remove tbody\n\t\t\t\t$tb.children().removeClass(rmv);\n\t\t\t\tts.processTbody(table, $tb, false); // restore tbody\n\t\t\t}\n\t\t}\n\t});\n\n})(jQuery);\n"
  },
  {
    "path": "vendor/assets/javascripts/jquery.timeago.js",
    "content": "/**\n * Timeago is a jQuery plugin that makes it easy to support automatically\n * updating fuzzy timestamps (e.g. \"4 minutes ago\" or \"about 1 day ago\").\n *\n * @name timeago\n * @version 1.4.3\n * @requires jQuery v1.2.3+\n * @author Ryan McGeary\n * @license MIT License - http://www.opensource.org/licenses/mit-license.php\n *\n * For usage and examples, visit:\n * http://timeago.yarp.com/\n *\n * Copyright (c) 2008-2015, Ryan McGeary (ryan -[at]- mcgeary [*dot*] org)\n */\n\n(function (factory) {\n  if (typeof define === 'function' && define.amd) {\n    // AMD. Register as an anonymous module.\n    define(['jquery'], factory);\n  } else if (typeof module === 'object' && typeof module.exports === 'object') {\n    factory(require('jquery'));\n  } else {\n    // Browser globals\n    factory(jQuery);\n  }\n}(function ($) {\n  $.timeago = function(timestamp) {\n    if (timestamp instanceof Date) {\n      return inWords(timestamp);\n    } else if (typeof timestamp === \"string\") {\n      return inWords($.timeago.parse(timestamp));\n    } else if (typeof timestamp === \"number\") {\n      return inWords(new Date(timestamp));\n    } else {\n      return inWords($.timeago.datetime(timestamp));\n    }\n  };\n  var $t = $.timeago;\n\n  $.extend($.timeago, {\n    settings: {\n      refreshMillis: 60000,\n      allowPast: true,\n      allowFuture: false,\n      localeTitle: false,\n      cutoff: 0,\n      strings: {\n        prefixAgo: null,\n        prefixFromNow: null,\n        suffixAgo: \"ago\",\n        suffixFromNow: \"from now\",\n        inPast: 'any moment now',\n        seconds: \"less than a minute\",\n        minute: \"about a minute\",\n        minutes: \"%d minutes\",\n        hour: \"about an hour\",\n        hours: \"about %d hours\",\n        day: \"a day\",\n        days: \"%d days\",\n        month: \"about a month\",\n        months: \"%d months\",\n        year: \"about a year\",\n        years: \"%d years\",\n        wordSeparator: 
\" \",\n        numbers: []\n      }\n    },\n\n    inWords: function(distanceMillis) {\n      if(!this.settings.allowPast && ! this.settings.allowFuture) {\n          throw 'timeago allowPast and allowFuture settings can not both be set to false.';\n      }\n\n      var $l = this.settings.strings;\n      var prefix = $l.prefixAgo;\n      var suffix = $l.suffixAgo;\n      if (this.settings.allowFuture) {\n        if (distanceMillis < 0) {\n          prefix = $l.prefixFromNow;\n          suffix = $l.suffixFromNow;\n        }\n      }\n\n      if(!this.settings.allowPast && distanceMillis >= 0) {\n        return this.settings.strings.inPast;\n      }\n\n      var seconds = Math.abs(distanceMillis) / 1000;\n      var minutes = seconds / 60;\n      var hours = minutes / 60;\n      var days = hours / 24;\n      var years = days / 365;\n\n      function substitute(stringOrFunction, number) {\n        var string = $.isFunction(stringOrFunction) ? stringOrFunction(number, distanceMillis) : stringOrFunction;\n        var value = ($l.numbers && $l.numbers[number]) || number;\n        return string.replace(/%d/i, value);\n      }\n\n      var words = seconds < 45 && substitute($l.seconds, Math.round(seconds)) ||\n        seconds < 90 && substitute($l.minute, 1) ||\n        minutes < 45 && substitute($l.minutes, Math.round(minutes)) ||\n        minutes < 90 && substitute($l.hour, 1) ||\n        hours < 24 && substitute($l.hours, Math.round(hours)) ||\n        hours < 42 && substitute($l.day, 1) ||\n        days < 30 && substitute($l.days, Math.round(days)) ||\n        days < 45 && substitute($l.month, 1) ||\n        days < 365 && substitute($l.months, Math.round(days / 30)) ||\n        years < 1.5 && substitute($l.year, 1) ||\n        substitute($l.years, Math.round(years));\n\n      var separator = $l.wordSeparator || \"\";\n      if ($l.wordSeparator === undefined) { separator = \" \"; }\n      return $.trim([prefix, words, suffix].join(separator));\n    },\n\n    parse: 
function(iso8601) {\n      var s = $.trim(iso8601);\n      s = s.replace(/\\.\\d+/,\"\"); // remove milliseconds\n      s = s.replace(/-/,\"/\").replace(/-/,\"/\");\n      s = s.replace(/T/,\" \").replace(/Z/,\" UTC\");\n      s = s.replace(/([\\+\\-]\\d\\d)\\:?(\\d\\d)/,\" $1$2\"); // -04:00 -> -0400\n      s = s.replace(/([\\+\\-]\\d\\d)$/,\" $100\"); // +09 -> +0900\n      return new Date(s);\n    },\n    datetime: function(elem) {\n      var iso8601 = $t.isTime(elem) ? $(elem).attr(\"datetime\") : $(elem).attr(\"title\");\n      return $t.parse(iso8601);\n    },\n    isTime: function(elem) {\n      // jQuery's `is()` doesn't play well with HTML5 in IE\n      return $(elem).get(0).tagName.toLowerCase() === \"time\"; // $(elem).is(\"time\");\n    }\n  });\n\n  // functions that can be called via $(el).timeago('action')\n  // init is default when no action is given\n  // functions are called with context of a single element\n  var functions = {\n    init: function(){\n      var refresh_el = $.proxy(refresh, this);\n      refresh_el();\n      var $s = $t.settings;\n      if ($s.refreshMillis > 0) {\n        this._timeagoInterval = setInterval(refresh_el, $s.refreshMillis);\n      }\n    },\n    update: function(time){\n      var parsedTime = $t.parse(time);\n      $(this).data('timeago', { datetime: parsedTime });\n      if($t.settings.localeTitle) $(this).attr(\"title\", parsedTime.toLocaleString());\n      refresh.apply(this);\n    },\n    updateFromDOM: function(){\n      $(this).data('timeago', { datetime: $t.parse( $t.isTime(this) ? $(this).attr(\"datetime\") : $(this).attr(\"title\") ) });\n      refresh.apply(this);\n    },\n    dispose: function () {\n      if (this._timeagoInterval) {\n        window.clearInterval(this._timeagoInterval);\n        this._timeagoInterval = null;\n      }\n    }\n  };\n\n  $.fn.timeago = function(action, options) {\n    var fn = action ? 
functions[action] : functions.init;\n    if(!fn){\n      throw new Error(\"Unknown function name '\"+ action +\"' for timeago\");\n    }\n    // each over objects here and call the requested function\n    this.each(function(){\n      fn.call(this, options);\n    });\n    return this;\n  };\n\n  function refresh() {\n    //check if it's still visible\n    if(!$.contains(document.documentElement,this)){\n      //stop if it has been removed\n      $(this).timeago(\"dispose\");\n      return this;\n    }\n\n    var data = prepareData(this);\n    var $s = $t.settings;\n\n    if (!isNaN(data.datetime)) {\n      if ( $s.cutoff == 0 || Math.abs(distance(data.datetime)) < $s.cutoff) {\n        $(this).text(inWords(data.datetime));\n      }\n    }\n    return this;\n  }\n\n  function prepareData(element) {\n    element = $(element);\n    if (!element.data(\"timeago\")) {\n      element.data(\"timeago\", { datetime: $t.datetime(element) });\n      var text = $.trim(element.text());\n      if ($t.settings.localeTitle) {\n        element.attr(\"title\", element.data('timeago').datetime.toLocaleString());\n      } else if (text.length > 0 && !($t.isTime(element) && element.attr(\"title\"))) {\n        element.attr(\"title\", text);\n      }\n    }\n    return element.data(\"timeago\");\n  }\n\n  function inWords(date) {\n    return $t.inWords(distance(date));\n  }\n\n  function distance(date) {\n    return (new Date().getTime() - date.getTime());\n  }\n\n  // fix for IE6 suckage\n  document.createElement(\"abbr\");\n  document.createElement(\"time\");\n}));\n"
  },
  {
    "path": "vendor/assets/javascripts/jquery.tipTip.js",
    "content": " /*\n * TipTip\n * Copyright 2010 Drew Wilson\n * www.drewwilson.com\n * code.drewwilson.com/entry/tiptip-jquery-plugin\n *\n * Version 1.3   -   Updated: Mar. 23, 2010\n *\n * This Plug-In will create a custom tooltip to replace the default\n * browser tooltip. It is extremely lightweight and very smart in\n * that it detects the edges of the browser window and will make sure\n * the tooltip stays within the current window size. As a result the\n * tooltip will adjust itself to be displayed above, below, to the left \n * or to the right depending on what is necessary to stay within the\n * browser window. It is completely customizable as well via CSS.\n *\n * This TipTip jQuery plug-in is dual licensed under the MIT and GPL licenses:\n *   http://www.opensource.org/licenses/mit-license.php\n *   http://www.gnu.org/licenses/gpl.html\n */\n\n(function($){\n\t$.fn.tipTip = function(options) {\n\t\tvar defaults = { \n\t\t\tactivation: \"hover\",\n\t\t\tkeepAlive: false,\n\t\t\tmaxWidth: \"200px\",\n\t\t\tedgeOffset: 3,\n\t\t\tdefaultPosition: \"bottom\",\n\t\t\tdelay: 400,\n\t\t\tfadeIn: 200,\n\t\t\tfadeOut: 200,\n\t\t\tattribute: \"title\",\n\t\t\tcontent: false, // HTML or String to fill TipTIp with\n\t\t  \tenter: function(){},\n\t\t  \texit: function(){}\n\t  \t};\n\t \tvar opts = $.extend(defaults, options);\n\t \t\n\t \t// Setup tip tip elements and render them to the DOM\n\t \tif($(\"#tiptip_holder\").length <= 0){\n\t \t\tvar tiptip_holder = $('<div id=\"tiptip_holder\" style=\"max-width:'+ opts.maxWidth +';\"></div>');\n\t\t\tvar tiptip_content = $('<div id=\"tiptip_content\"></div>');\n\t\t\tvar tiptip_arrow = $('<div id=\"tiptip_arrow\"></div>');\n\t\t\t$(\"body\").append(tiptip_holder.html(tiptip_content).prepend(tiptip_arrow.html('<div id=\"tiptip_arrow_inner\"></div>')));\n\t\t} else {\n\t\t\tvar tiptip_holder = $(\"#tiptip_holder\");\n\t\t\tvar tiptip_content = $(\"#tiptip_content\");\n\t\t\tvar tiptip_arrow = 
$(\"#tiptip_arrow\");\n\t\t}\n\t\t\n\t\treturn this.each(function(){\n\t\t\tvar org_elem = $(this);\n\t\t\tif(opts.content){\n\t\t\t\tvar org_title = opts.content;\n\t\t\t} else {\n\t\t\t\tvar org_title = org_elem.attr(opts.attribute);\n\t\t\t}\n\t\t\tif(org_title != \"\"){\n\t\t\t\tif(!opts.content){\n\t\t\t\t\torg_elem.removeAttr(opts.attribute); //remove original Attribute\n\t\t\t\t}\n\t\t\t\tvar timeout = false;\n\t\t\t\t\n\t\t\t\tif(opts.activation == \"hover\"){\n\t\t\t\t\torg_elem.hover(function(){\n\t\t\t\t\t\tactive_tiptip();\n\t\t\t\t\t}, function(){\n\t\t\t\t\t\tif(!opts.keepAlive){\n\t\t\t\t\t\t\tdeactive_tiptip();\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\t\t\t\t\tif(opts.keepAlive){\n\t\t\t\t\t\ttiptip_holder.hover(function(){}, function(){\n\t\t\t\t\t\t\tdeactive_tiptip();\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t} else if(opts.activation == \"focus\"){\n\t\t\t\t\torg_elem.focus(function(){\n\t\t\t\t\t\tactive_tiptip();\n\t\t\t\t\t}).blur(function(){\n\t\t\t\t\t\tdeactive_tiptip();\n\t\t\t\t\t});\n\t\t\t\t} else if(opts.activation == \"click\"){\n\t\t\t\t\torg_elem.click(function(){\n\t\t\t\t\t\tactive_tiptip();\n\t\t\t\t\t\treturn false;\n\t\t\t\t\t}).hover(function(){},function(){\n\t\t\t\t\t\tif(!opts.keepAlive){\n\t\t\t\t\t\t\tdeactive_tiptip();\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\t\t\t\t\tif(opts.keepAlive){\n\t\t\t\t\t\ttiptip_holder.hover(function(){}, function(){\n\t\t\t\t\t\t\tdeactive_tiptip();\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\n\t\t\t\tfunction active_tiptip(){\n\t\t\t\t\topts.enter.call(this);\n\t\t\t\t\ttiptip_content.html(org_title);\n\t\t\t\t\ttiptip_holder.hide().removeAttr(\"class\").css(\"margin\",\"0\");\n\t\t\t\t\ttiptip_arrow.removeAttr(\"style\");\n\t\t\t\t\t\n\t\t\t\t\tvar top = parseInt(org_elem.offset()['top']);\n\t\t\t\t\tvar left = parseInt(org_elem.offset()['left']);\n\t\t\t\t\tvar org_width = parseInt(org_elem.outerWidth());\n\t\t\t\t\tvar org_height = parseInt(org_elem.outerHeight());\n\t\t\t\t\tvar tip_w = 
tiptip_holder.outerWidth();\n\t\t\t\t\tvar tip_h = tiptip_holder.outerHeight();\n\t\t\t\t\tvar w_compare = Math.round((org_width - tip_w) / 2);\n\t\t\t\t\tvar h_compare = Math.round((org_height - tip_h) / 2);\n\t\t\t\t\tvar marg_left = Math.round(left + w_compare);\n\t\t\t\t\tvar marg_top = Math.round(top + org_height + opts.edgeOffset);\n\t\t\t\t\tvar t_class = \"\";\n\t\t\t\t\tvar arrow_top = \"\";\n\t\t\t\t\tvar arrow_left = Math.round(tip_w - 12) / 2;\n\n                    if(opts.defaultPosition == \"bottom\"){\n                    \tt_class = \"_bottom\";\n                   \t} else if(opts.defaultPosition == \"top\"){ \n                   \t\tt_class = \"_top\";\n                   \t} else if(opts.defaultPosition == \"left\"){\n                   \t\tt_class = \"_left\";\n                   \t} else if(opts.defaultPosition == \"right\"){\n                   \t\tt_class = \"_right\";\n                   \t}\n\t\t\t\t\t\n\t\t\t\t\tvar right_compare = (w_compare + left) < parseInt($(window).scrollLeft());\n\t\t\t\t\tvar left_compare = (tip_w + left) > parseInt($(window).width());\n\t\t\t\t\t\n\t\t\t\t\tif((right_compare && w_compare < 0) || (t_class == \"_right\" && !left_compare) || (t_class == \"_left\" && left < (tip_w + opts.edgeOffset + 5))){\n\t\t\t\t\t\tt_class = \"_right\";\n\t\t\t\t\t\tarrow_top = Math.round(tip_h - 13) / 2;\n\t\t\t\t\t\tarrow_left = -12;\n\t\t\t\t\t\tmarg_left = Math.round(left + org_width + opts.edgeOffset);\n\t\t\t\t\t\tmarg_top = Math.round(top + h_compare);\n\t\t\t\t\t} else if((left_compare && w_compare < 0) || (t_class == \"_left\" && !right_compare)){\n\t\t\t\t\t\tt_class = \"_left\";\n\t\t\t\t\t\tarrow_top = Math.round(tip_h - 13) / 2;\n\t\t\t\t\t\tarrow_left =  Math.round(tip_w);\n\t\t\t\t\t\tmarg_left = Math.round(left - (tip_w + opts.edgeOffset + 5));\n\t\t\t\t\t\tmarg_top = Math.round(top + h_compare);\n\t\t\t\t\t}\n\n\t\t\t\t\tvar top_compare = (top + org_height + opts.edgeOffset + tip_h + 8) > 
parseInt($(window).height() + $(window).scrollTop());\n\t\t\t\t\tvar bottom_compare = ((top + org_height) - (opts.edgeOffset + tip_h + 8)) < 0;\n\t\t\t\t\t\n\t\t\t\t\tif(top_compare || (t_class == \"_bottom\" && top_compare) || (t_class == \"_top\" && !bottom_compare)){\n\t\t\t\t\t\tif(t_class == \"_top\" || t_class == \"_bottom\"){\n\t\t\t\t\t\t\tt_class = \"_top\";\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tt_class = t_class+\"_top\";\n\t\t\t\t\t\t}\n\t\t\t\t\t\tarrow_top = tip_h;\n\t\t\t\t\t\tmarg_top = Math.round(top - (tip_h + 5 + opts.edgeOffset));\n\t\t\t\t\t} else if(bottom_compare | (t_class == \"_top\" && bottom_compare) || (t_class == \"_bottom\" && !top_compare)){\n\t\t\t\t\t\tif(t_class == \"_top\" || t_class == \"_bottom\"){\n\t\t\t\t\t\t\tt_class = \"_bottom\";\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tt_class = t_class+\"_bottom\";\n\t\t\t\t\t\t}\n\t\t\t\t\t\tarrow_top = -12;\t\t\t\t\t\t\n\t\t\t\t\t\tmarg_top = Math.round(top + org_height + opts.edgeOffset);\n\t\t\t\t\t}\n\t\t\t\t\n\t\t\t\t\tif(t_class == \"_right_top\" || t_class == \"_left_top\"){\n\t\t\t\t\t\tmarg_top = marg_top + 5;\n\t\t\t\t\t} else if(t_class == \"_right_bottom\" || t_class == \"_left_bottom\"){\t\t\n\t\t\t\t\t\tmarg_top = marg_top - 5;\n\t\t\t\t\t}\n\t\t\t\t\tif(t_class == \"_left_top\" || t_class == \"_left_bottom\"){\t\n\t\t\t\t\t\tmarg_left = marg_left + 5;\n\t\t\t\t\t}\n\t\t\t\t\ttiptip_arrow.css({\"margin-left\": arrow_left+\"px\", \"margin-top\": arrow_top+\"px\"});\n\t\t\t\t\ttiptip_holder.css({\"margin-left\": marg_left+\"px\", \"margin-top\": marg_top+\"px\"}).attr(\"class\",\"tip\"+t_class);\n\t\t\t\t\t\n\t\t\t\t\tif (timeout){ clearTimeout(timeout); }\n\t\t\t\t\ttimeout = setTimeout(function(){ tiptip_holder.stop(true,true).fadeIn(opts.fadeIn); }, opts.delay);\t\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\tfunction deactive_tiptip(){\n\t\t\t\t\topts.exit.call(this);\n\t\t\t\t\tif (timeout){ clearTimeout(timeout); 
}\n\t\t\t\t\ttiptip_holder.fadeOut(opts.fadeOut);\n\t\t\t\t}\n\t\t\t}\t\t\t\t\n\t\t});\n\t}\n})(jQuery);  \t"
  },
  {
    "path": "vendor/assets/javascripts/moment.js",
    "content": "// moment.js\n// version : 2.0.0\n// author : Tim Wood\n// license : MIT\n// momentjs.com\n\n(function (undefined) {\n\n    /************************************\n        Constants\n    ************************************/\n\n    var moment,\n        VERSION = \"2.0.0\",\n        round = Math.round, i,\n        // internal storage for language config files\n        languages = {},\n\n        // check for nodeJS\n        hasModule = (typeof module !== 'undefined' && module.exports),\n\n        // ASP.NET json date format regex\n        aspNetJsonRegex = /^\\/?Date\\((\\-?\\d+)/i,\n\n        // format tokens\n        formattingTokens = /(\\[[^\\[]*\\])|(\\\\)?(Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|YYYYY|YYYY|YY|a|A|hh?|HH?|mm?|ss?|SS?S?|X|zz?|ZZ?|.)/g,\n        localFormattingTokens = /(\\[[^\\[]*\\])|(\\\\)?(LT|LL?L?L?|l{1,4})/g,\n\n        // parsing tokens\n        parseMultipleFormatChunker = /([0-9a-zA-Z\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF]+)/gi,\n\n        // parsing token regexes\n        parseTokenOneOrTwoDigits = /\\d\\d?/, // 0 - 99\n        parseTokenOneToThreeDigits = /\\d{1,3}/, // 0 - 999\n        parseTokenThreeDigits = /\\d{3}/, // 000 - 999\n        parseTokenFourDigits = /\\d{1,4}/, // 0 - 9999\n        parseTokenSixDigits = /[+\\-]?\\d{1,6}/, // -999,999 - 999,999\n        parseTokenWord = /[0-9]*[a-z\\u00A0-\\u05FF\\u0700-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF]+|[\\u0600-\\u06FF]+\\s*?[\\u0600-\\u06FF]+/i, // any word (or two) characters or numbers including two word month in arabic.\n        parseTokenTimezone = /Z|[\\+\\-]\\d\\d:?\\d\\d/i, // +00:00 -00:00 +0000 -0000 or Z\n        parseTokenT = /T/i, // T (ISO seperator)\n        parseTokenTimestampMs = /[\\+\\-]?\\d+(\\.\\d{1,3})?/, // 123456789 123456789.123\n\n        // preliminary iso regex\n        // 0000-00-00 + T + 00 or 00:00 or 00:00:00 or 00:00:00.000 + +00:00 or +0000\n        isoRegex = /^\\s*\\d{4}-\\d\\d-\\d\\d((T| 
)(\\d\\d(:\\d\\d(:\\d\\d(\\.\\d\\d?\\d?)?)?)?)?([\\+\\-]\\d\\d:?\\d\\d)?)?/,\n        isoFormat = 'YYYY-MM-DDTHH:mm:ssZ',\n\n        // iso time formats and regexes\n        isoTimes = [\n            ['HH:mm:ss.S', /(T| )\\d\\d:\\d\\d:\\d\\d\\.\\d{1,3}/],\n            ['HH:mm:ss', /(T| )\\d\\d:\\d\\d:\\d\\d/],\n            ['HH:mm', /(T| )\\d\\d:\\d\\d/],\n            ['HH', /(T| )\\d\\d/]\n        ],\n\n        // timezone chunker \"+10:00\" > [\"10\", \"00\"] or \"-1530\" > [\"-15\", \"30\"]\n        parseTimezoneChunker = /([\\+\\-]|\\d\\d)/gi,\n\n        // getter and setter names\n        proxyGettersAndSetters = 'Month|Date|Hours|Minutes|Seconds|Milliseconds'.split('|'),\n        unitMillisecondFactors = {\n            'Milliseconds' : 1,\n            'Seconds' : 1e3,\n            'Minutes' : 6e4,\n            'Hours' : 36e5,\n            'Days' : 864e5,\n            'Months' : 2592e6,\n            'Years' : 31536e6\n        },\n\n        // format function strings\n        formatFunctions = {},\n\n        // tokens to ordinalize and pad\n        ordinalizeTokens = 'DDD w W M D d'.split(' '),\n        paddedTokens = 'M D H h m s w W'.split(' '),\n\n        formatTokenFunctions = {\n            M    : function () {\n                return this.month() + 1;\n            },\n            MMM  : function (format) {\n                return this.lang().monthsShort(this, format);\n            },\n            MMMM : function (format) {\n                return this.lang().months(this, format);\n            },\n            D    : function () {\n                return this.date();\n            },\n            DDD  : function () {\n                return this.dayOfYear();\n            },\n            d    : function () {\n                return this.day();\n            },\n            dd   : function (format) {\n                return this.lang().weekdaysMin(this, format);\n            },\n            ddd  : function (format) {\n                return 
this.lang().weekdaysShort(this, format);\n            },\n            dddd : function (format) {\n                return this.lang().weekdays(this, format);\n            },\n            w    : function () {\n                return this.week();\n            },\n            W    : function () {\n                return this.isoWeek();\n            },\n            YY   : function () {\n                return leftZeroFill(this.year() % 100, 2);\n            },\n            YYYY : function () {\n                return leftZeroFill(this.year(), 4);\n            },\n            YYYYY : function () {\n                return leftZeroFill(this.year(), 5);\n            },\n            a    : function () {\n                return this.lang().meridiem(this.hours(), this.minutes(), true);\n            },\n            A    : function () {\n                return this.lang().meridiem(this.hours(), this.minutes(), false);\n            },\n            H    : function () {\n                return this.hours();\n            },\n            h    : function () {\n                return this.hours() % 12 || 12;\n            },\n            m    : function () {\n                return this.minutes();\n            },\n            s    : function () {\n                return this.seconds();\n            },\n            S    : function () {\n                return ~~(this.milliseconds() / 100);\n            },\n            SS   : function () {\n                return leftZeroFill(~~(this.milliseconds() / 10), 2);\n            },\n            SSS  : function () {\n                return leftZeroFill(this.milliseconds(), 3);\n            },\n            Z    : function () {\n                var a = -this.zone(),\n                    b = \"+\";\n                if (a < 0) {\n                    a = -a;\n                    b = \"-\";\n                }\n                return b + leftZeroFill(~~(a / 60), 2) + \":\" + leftZeroFill(~~a % 60, 2);\n            },\n            ZZ   : function () {\n  
              var a = -this.zone(),\n                    b = \"+\";\n                if (a < 0) {\n                    a = -a;\n                    b = \"-\";\n                }\n                return b + leftZeroFill(~~(10 * a / 6), 4);\n            },\n            X    : function () {\n                return this.unix();\n            }\n        };\n\n    function padToken(func, count) {\n        return function (a) {\n            return leftZeroFill(func.call(this, a), count);\n        };\n    }\n    function ordinalizeToken(func) {\n        return function (a) {\n            return this.lang().ordinal(func.call(this, a));\n        };\n    }\n\n    while (ordinalizeTokens.length) {\n        i = ordinalizeTokens.pop();\n        formatTokenFunctions[i + 'o'] = ordinalizeToken(formatTokenFunctions[i]);\n    }\n    while (paddedTokens.length) {\n        i = paddedTokens.pop();\n        formatTokenFunctions[i + i] = padToken(formatTokenFunctions[i], 2);\n    }\n    formatTokenFunctions.DDDD = padToken(formatTokenFunctions.DDD, 3);\n\n\n    /************************************\n        Constructors\n    ************************************/\n\n    function Language() {\n\n    }\n\n    // Moment prototype object\n    function Moment(config) {\n        extend(this, config);\n    }\n\n    // Duration Constructor\n    function Duration(duration) {\n        var data = this._data = {},\n            years = duration.years || duration.year || duration.y || 0,\n            months = duration.months || duration.month || duration.M || 0,\n            weeks = duration.weeks || duration.week || duration.w || 0,\n            days = duration.days || duration.day || duration.d || 0,\n            hours = duration.hours || duration.hour || duration.h || 0,\n            minutes = duration.minutes || duration.minute || duration.m || 0,\n            seconds = duration.seconds || duration.second || duration.s || 0,\n            milliseconds = duration.milliseconds || duration.millisecond 
|| duration.ms || 0;\n\n        // representation for dateAddRemove\n        this._milliseconds = milliseconds +\n            seconds * 1e3 + // 1000\n            minutes * 6e4 + // 1000 * 60\n            hours * 36e5; // 1000 * 60 * 60\n        // Because of dateAddRemove treats 24 hours as different from a\n        // day when working around DST, we need to store them separately\n        this._days = days +\n            weeks * 7;\n        // It is impossible translate months into days without knowing\n        // which months you are are talking about, so we have to store\n        // it separately.\n        this._months = months +\n            years * 12;\n\n        // The following code bubbles up values, see the tests for\n        // examples of what that means.\n        data.milliseconds = milliseconds % 1000;\n        seconds += absRound(milliseconds / 1000);\n\n        data.seconds = seconds % 60;\n        minutes += absRound(seconds / 60);\n\n        data.minutes = minutes % 60;\n        hours += absRound(minutes / 60);\n\n        data.hours = hours % 24;\n        days += absRound(hours / 24);\n\n        days += weeks * 7;\n        data.days = days % 30;\n\n        months += absRound(days / 30);\n\n        data.months = months % 12;\n        years += absRound(months / 12);\n\n        data.years = years;\n    }\n\n\n    /************************************\n        Helpers\n    ************************************/\n\n\n    function extend(a, b) {\n        for (var i in b) {\n            if (b.hasOwnProperty(i)) {\n                a[i] = b[i];\n            }\n        }\n        return a;\n    }\n\n    function absRound(number) {\n        if (number < 0) {\n            return Math.ceil(number);\n        } else {\n            return Math.floor(number);\n        }\n    }\n\n    // left zero fill a number\n    // see http://jsperf.com/left-zero-filling for performance comparison\n    function leftZeroFill(number, targetLength) {\n        var output = number + 
'';\n        while (output.length < targetLength) {\n            output = '0' + output;\n        }\n        return output;\n    }\n\n    // helper function for _.addTime and _.subtractTime\n    function addOrSubtractDurationFromMoment(mom, duration, isAdding) {\n        var ms = duration._milliseconds,\n            d = duration._days,\n            M = duration._months,\n            currentDate;\n\n        if (ms) {\n            mom._d.setTime(+mom + ms * isAdding);\n        }\n        if (d) {\n            mom.date(mom.date() + d * isAdding);\n        }\n        if (M) {\n            currentDate = mom.date();\n            mom.date(1)\n                .month(mom.month() + M * isAdding)\n                .date(Math.min(currentDate, mom.daysInMonth()));\n        }\n    }\n\n    // check if is an array\n    function isArray(input) {\n        return Object.prototype.toString.call(input) === '[object Array]';\n    }\n\n    // compare two arrays, return the number of differences\n    function compareArrays(array1, array2) {\n        var len = Math.min(array1.length, array2.length),\n            lengthDiff = Math.abs(array1.length - array2.length),\n            diffs = 0,\n            i;\n        for (i = 0; i < len; i++) {\n            if (~~array1[i] !== ~~array2[i]) {\n                diffs++;\n            }\n        }\n        return diffs + lengthDiff;\n    }\n\n\n    /************************************\n        Languages\n    ************************************/\n\n\n    Language.prototype = {\n        set : function (config) {\n            var prop, i;\n            for (i in config) {\n                prop = config[i];\n                if (typeof prop === 'function') {\n                    this[i] = prop;\n                } else {\n                    this['_' + i] = prop;\n                }\n            }\n        },\n\n        _months : \"January_February_March_April_May_June_July_August_September_October_November_December\".split(\"_\"),\n        months : 
function (m) {\n            return this._months[m.month()];\n        },\n\n        _monthsShort : \"Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec\".split(\"_\"),\n        monthsShort : function (m) {\n            return this._monthsShort[m.month()];\n        },\n\n        monthsParse : function (monthName) {\n            var i, mom, regex, output;\n\n            if (!this._monthsParse) {\n                this._monthsParse = [];\n            }\n\n            for (i = 0; i < 12; i++) {\n                // make the regex if we don't have it already\n                if (!this._monthsParse[i]) {\n                    mom = moment([2000, i]);\n                    regex = '^' + this.months(mom, '') + '|^' + this.monthsShort(mom, '');\n                    this._monthsParse[i] = new RegExp(regex.replace('.', ''), 'i');\n                }\n                // test the regex\n                if (this._monthsParse[i].test(monthName)) {\n                    return i;\n                }\n            }\n        },\n\n        _weekdays : \"Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday\".split(\"_\"),\n        weekdays : function (m) {\n            return this._weekdays[m.day()];\n        },\n\n        _weekdaysShort : \"Sun_Mon_Tue_Wed_Thu_Fri_Sat\".split(\"_\"),\n        weekdaysShort : function (m) {\n            return this._weekdaysShort[m.day()];\n        },\n\n        _weekdaysMin : \"Su_Mo_Tu_We_Th_Fr_Sa\".split(\"_\"),\n        weekdaysMin : function (m) {\n            return this._weekdaysMin[m.day()];\n        },\n\n        _longDateFormat : {\n            LT : \"h:mm A\",\n            L : \"MM/DD/YYYY\",\n            LL : \"MMMM D YYYY\",\n            LLL : \"MMMM D YYYY LT\",\n            LLLL : \"dddd, MMMM D YYYY LT\"\n        },\n        longDateFormat : function (key) {\n            var output = this._longDateFormat[key];\n            if (!output && this._longDateFormat[key.toUpperCase()]) {\n                output = 
this._longDateFormat[key.toUpperCase()].replace(/MMMM|MM|DD|dddd/g, function (val) {\n                    return val.slice(1);\n                });\n                this._longDateFormat[key] = output;\n            }\n            return output;\n        },\n\n        meridiem : function (hours, minutes, isLower) {\n            if (hours > 11) {\n                return isLower ? 'pm' : 'PM';\n            } else {\n                return isLower ? 'am' : 'AM';\n            }\n        },\n\n        _calendar : {\n            sameDay : '[Today at] LT',\n            nextDay : '[Tomorrow at] LT',\n            nextWeek : 'dddd [at] LT',\n            lastDay : '[Yesterday at] LT',\n            lastWeek : '[last] dddd [at] LT',\n            sameElse : 'L'\n        },\n        calendar : function (key, mom) {\n            var output = this._calendar[key];\n            return typeof output === 'function' ? output.apply(mom) : output;\n        },\n\n        _relativeTime : {\n            future : \"in %s\",\n            past : \"%s ago\",\n            s : \"a few seconds\",\n            m : \"a minute\",\n            mm : \"%d minutes\",\n            h : \"an hour\",\n            hh : \"%d hours\",\n            d : \"a day\",\n            dd : \"%d days\",\n            M : \"a month\",\n            MM : \"%d months\",\n            y : \"a year\",\n            yy : \"%d years\"\n        },\n        relativeTime : function (number, withoutSuffix, string, isFuture) {\n            var output = this._relativeTime[string];\n            return (typeof output === 'function') ?\n                output(number, withoutSuffix, string, isFuture) :\n                output.replace(/%d/i, number);\n        },\n        pastFuture : function (diff, output) {\n            var format = this._relativeTime[diff > 0 ? 'future' : 'past'];\n            return typeof format === 'function' ? 
format(output) : format.replace(/%s/i, output);\n        },\n\n        ordinal : function (number) {\n            return this._ordinal.replace(\"%d\", number);\n        },\n        _ordinal : \"%d\",\n\n        preparse : function (string) {\n            return string;\n        },\n\n        postformat : function (string) {\n            return string;\n        },\n\n        week : function (mom) {\n            return weekOfYear(mom, this._week.dow, this._week.doy);\n        },\n        _week : {\n            dow : 0, // Sunday is the first day of the week.\n            doy : 6  // The week that contains Jan 1st is the first week of the year.\n        }\n    };\n\n    // Loads a language definition into the `languages` cache.  The function\n    // takes a key and optionally values.  If not in the browser and no values\n    // are provided, it will load the language file module.  As a convenience,\n    // this function also returns the language values.\n    function loadLang(key, values) {\n        values.abbr = key;\n        if (!languages[key]) {\n            languages[key] = new Language();\n        }\n        languages[key].set(values);\n        return languages[key];\n    }\n\n    // Determines which language definition to use and returns it.\n    //\n    // With no parameters, it will return the global language.  
If you\n    // pass in a language key, such as 'en', it will return the\n    // definition for 'en', so long as 'en' has already been loaded using\n    // moment.lang.\n    function getLangDefinition(key) {\n        if (!key) {\n            return moment.fn._lang;\n        }\n        if (!languages[key] && hasModule) {\n            require('./lang/' + key);\n        }\n        return languages[key];\n    }\n\n\n    /************************************\n        Formatting\n    ************************************/\n\n\n    function removeFormattingTokens(input) {\n        if (input.match(/\\[.*\\]/)) {\n            return input.replace(/^\\[|\\]$/g, \"\");\n        }\n        return input.replace(/\\\\/g, \"\");\n    }\n\n    function makeFormatFunction(format) {\n        var array = format.match(formattingTokens), i, length;\n\n        for (i = 0, length = array.length; i < length; i++) {\n            if (formatTokenFunctions[array[i]]) {\n                array[i] = formatTokenFunctions[array[i]];\n            } else {\n                array[i] = removeFormattingTokens(array[i]);\n            }\n        }\n\n        return function (mom) {\n            var output = \"\";\n            for (i = 0; i < length; i++) {\n                output += typeof array[i].call === 'function' ? 
array[i].call(mom, format) : array[i];\n            }\n            return output;\n        };\n    }\n\n    // format date using native date object\n    function formatMoment(m, format) {\n        var i = 5;\n\n        function replaceLongDateFormatTokens(input) {\n            return m.lang().longDateFormat(input) || input;\n        }\n\n        while (i-- && localFormattingTokens.test(format)) {\n            format = format.replace(localFormattingTokens, replaceLongDateFormatTokens);\n        }\n\n        if (!formatFunctions[format]) {\n            formatFunctions[format] = makeFormatFunction(format);\n        }\n\n        return formatFunctions[format](m);\n    }\n\n\n    /************************************\n        Parsing\n    ************************************/\n\n\n    // get the regex to find the next token\n    function getParseRegexForToken(token) {\n        switch (token) {\n        case 'DDDD':\n            return parseTokenThreeDigits;\n        case 'YYYY':\n            return parseTokenFourDigits;\n        case 'YYYYY':\n            return parseTokenSixDigits;\n        case 'S':\n        case 'SS':\n        case 'SSS':\n        case 'DDD':\n            return parseTokenOneToThreeDigits;\n        case 'MMM':\n        case 'MMMM':\n        case 'dd':\n        case 'ddd':\n        case 'dddd':\n        case 'a':\n        case 'A':\n            return parseTokenWord;\n        case 'X':\n            return parseTokenTimestampMs;\n        case 'Z':\n        case 'ZZ':\n            return parseTokenTimezone;\n        case 'T':\n            return parseTokenT;\n        case 'MM':\n        case 'DD':\n        case 'YY':\n        case 'HH':\n        case 'hh':\n        case 'mm':\n        case 'ss':\n        case 'M':\n        case 'D':\n        case 'd':\n        case 'H':\n        case 'h':\n        case 'm':\n        case 's':\n            return parseTokenOneOrTwoDigits;\n        default :\n            return new RegExp(token.replace('\\\\', ''));\n     
   }\n    }\n\n    // function to convert string input to date\n    function addTimeToArrayFromToken(token, input, config) {\n        var a, b,\n            datePartArray = config._a;\n\n        switch (token) {\n        // MONTH\n        case 'M' : // fall through to MM\n        case 'MM' :\n            datePartArray[1] = (input == null) ? 0 : ~~input - 1;\n            break;\n        case 'MMM' : // fall through to MMMM\n        case 'MMMM' :\n            a = getLangDefinition(config._l).monthsParse(input);\n            // if we didn't find a month name, mark the date as invalid.\n            if (a != null) {\n                datePartArray[1] = a;\n            } else {\n                config._isValid = false;\n            }\n            break;\n        // DAY OF MONTH\n        case 'D' : // fall through to DDDD\n        case 'DD' : // fall through to DDDD\n        case 'DDD' : // fall through to DDDD\n        case 'DDDD' :\n            if (input != null) {\n                datePartArray[2] = ~~input;\n            }\n            break;\n        // YEAR\n        case 'YY' :\n            datePartArray[0] = ~~input + (~~input > 68 ? 
1900 : 2000);\n            break;\n        case 'YYYY' :\n        case 'YYYYY' :\n            datePartArray[0] = ~~input;\n            break;\n        // AM / PM\n        case 'a' : // fall through to A\n        case 'A' :\n            config._isPm = ((input + '').toLowerCase() === 'pm');\n            break;\n        // 24 HOUR\n        case 'H' : // fall through to hh\n        case 'HH' : // fall through to hh\n        case 'h' : // fall through to hh\n        case 'hh' :\n            datePartArray[3] = ~~input;\n            break;\n        // MINUTE\n        case 'm' : // fall through to mm\n        case 'mm' :\n            datePartArray[4] = ~~input;\n            break;\n        // SECOND\n        case 's' : // fall through to ss\n        case 'ss' :\n            datePartArray[5] = ~~input;\n            break;\n        // MILLISECOND\n        case 'S' :\n        case 'SS' :\n        case 'SSS' :\n            datePartArray[6] = ~~ (('0.' + input) * 1000);\n            break;\n        // UNIX TIMESTAMP WITH MS\n        case 'X':\n            config._d = new Date(parseFloat(input) * 1000);\n            break;\n        // TIMEZONE\n        case 'Z' : // fall through to ZZ\n        case 'ZZ' :\n            config._useUTC = true;\n            a = (input + '').match(parseTimezoneChunker);\n            if (a && a[1]) {\n                config._tzh = ~~a[1];\n            }\n            if (a && a[2]) {\n                config._tzm = ~~a[2];\n            }\n            // reverse offsets\n            if (a && a[0] === '+') {\n                config._tzh = -config._tzh;\n                config._tzm = -config._tzm;\n            }\n            break;\n        }\n\n        // if the input is null, the date is not valid\n        if (input == null) {\n            config._isValid = false;\n        }\n    }\n\n    // convert an array to a date.\n    // the array should mirror the parameters below\n    // note: all values past the year are optional and will default to the lowest 
possible value.\n    // [year, month, day , hour, minute, second, millisecond]\n    function dateFromArray(config) {\n        var i, date, input = [];\n\n        if (config._d) {\n            return;\n        }\n\n        for (i = 0; i < 7; i++) {\n            config._a[i] = input[i] = (config._a[i] == null) ? (i === 2 ? 1 : 0) : config._a[i];\n        }\n\n        // add the offsets to the time to be parsed so that we can have a clean array for checking isValid\n        input[3] += config._tzh || 0;\n        input[4] += config._tzm || 0;\n\n        date = new Date(0);\n\n        if (config._useUTC) {\n            date.setUTCFullYear(input[0], input[1], input[2]);\n            date.setUTCHours(input[3], input[4], input[5], input[6]);\n        } else {\n            date.setFullYear(input[0], input[1], input[2]);\n            date.setHours(input[3], input[4], input[5], input[6]);\n        }\n\n        config._d = date;\n    }\n\n    // date from string and format string\n    function makeDateFromStringAndFormat(config) {\n        // This array is used to make a Date, either with `new Date` or `Date.UTC`\n        var tokens = config._f.match(formattingTokens),\n            string = config._i,\n            i, parsedInput;\n\n        config._a = [];\n\n        for (i = 0; i < tokens.length; i++) {\n            parsedInput = (getParseRegexForToken(tokens[i]).exec(string) || [])[0];\n            if (parsedInput) {\n                string = string.slice(string.indexOf(parsedInput) + parsedInput.length);\n            }\n            // don't parse if its not a known token\n            if (formatTokenFunctions[tokens[i]]) {\n                addTimeToArrayFromToken(tokens[i], parsedInput, config);\n            }\n        }\n        // handle am pm\n        if (config._isPm && config._a[3] < 12) {\n            config._a[3] += 12;\n        }\n        // if is 12 am, change hours to 0\n        if (config._isPm === false && config._a[3] === 12) {\n            config._a[3] = 0;\n   
     }\n        // return\n        dateFromArray(config);\n    }\n\n    // date from string and array of format strings\n    function makeDateFromStringAndArray(config) {\n        var tempConfig,\n            tempMoment,\n            bestMoment,\n\n            scoreToBeat = 99,\n            i,\n            currentDate,\n            currentScore;\n\n        while (config._f.length) {\n            tempConfig = extend({}, config);\n            tempConfig._f = config._f.pop();\n            makeDateFromStringAndFormat(tempConfig);\n            tempMoment = new Moment(tempConfig);\n\n            if (tempMoment.isValid()) {\n                bestMoment = tempMoment;\n                break;\n            }\n\n            currentScore = compareArrays(tempConfig._a, tempMoment.toArray());\n\n            if (currentScore < scoreToBeat) {\n                scoreToBeat = currentScore;\n                bestMoment = tempMoment;\n            }\n        }\n\n        extend(config, bestMoment);\n    }\n\n    // date from iso format\n    function makeDateFromString(config) {\n        var i,\n            string = config._i;\n        if (isoRegex.exec(string)) {\n            config._f = 'YYYY-MM-DDT';\n            for (i = 0; i < 4; i++) {\n                if (isoTimes[i][1].exec(string)) {\n                    config._f += isoTimes[i][0];\n                    break;\n                }\n            }\n            if (parseTokenTimezone.exec(string)) {\n                config._f += \" Z\";\n            }\n            makeDateFromStringAndFormat(config);\n        } else {\n            config._d = new Date(string);\n        }\n    }\n\n    function makeDateFromInput(config) {\n        var input = config._i,\n            matched = aspNetJsonRegex.exec(input);\n\n        if (input === undefined) {\n            config._d = new Date();\n        } else if (matched) {\n            config._d = new Date(+matched[1]);\n        } else if (typeof input === 'string') {\n            
makeDateFromString(config);\n        } else if (isArray(input)) {\n            config._a = input.slice(0);\n            dateFromArray(config);\n        } else {\n            config._d = input instanceof Date ? new Date(+input) : new Date(input);\n        }\n    }\n\n\n    /************************************\n        Relative Time\n    ************************************/\n\n\n    // helper function for moment.fn.from, moment.fn.fromNow, and moment.duration.fn.humanize\n    function substituteTimeAgo(string, number, withoutSuffix, isFuture, lang) {\n        return lang.relativeTime(number || 1, !!withoutSuffix, string, isFuture);\n    }\n\n    function relativeTime(milliseconds, withoutSuffix, lang) {\n        var seconds = round(Math.abs(milliseconds) / 1000),\n            minutes = round(seconds / 60),\n            hours = round(minutes / 60),\n            days = round(hours / 24),\n            years = round(days / 365),\n            args = seconds < 45 && ['s', seconds] ||\n                minutes === 1 && ['m'] ||\n                minutes < 45 && ['mm', minutes] ||\n                hours === 1 && ['h'] ||\n                hours < 22 && ['hh', hours] ||\n                days === 1 && ['d'] ||\n                days <= 25 && ['dd', days] ||\n                days <= 45 && ['M'] ||\n                days < 345 && ['MM', round(days / 30)] ||\n                years === 1 && ['y'] || ['yy', years];\n        args[2] = withoutSuffix;\n        args[3] = milliseconds > 0;\n        args[4] = lang;\n        return substituteTimeAgo.apply({}, args);\n    }\n\n\n    /************************************\n        Week of Year\n    ************************************/\n\n\n    // firstDayOfWeek       0 = sun, 6 = sat\n    //                      the day of the week that starts the week\n    //                      (usually sunday or monday)\n    // firstDayOfWeekOfYear 0 = sun, 6 = sat\n    //                      the first week is the week that contains the first\n    //      
                of this day of the week\n    //                      (eg. ISO weeks use thursday (4))\n    function weekOfYear(mom, firstDayOfWeek, firstDayOfWeekOfYear) {\n        var end = firstDayOfWeekOfYear - firstDayOfWeek,\n            daysToDayOfWeek = firstDayOfWeekOfYear - mom.day();\n\n\n        if (daysToDayOfWeek > end) {\n            daysToDayOfWeek -= 7;\n        }\n\n        if (daysToDayOfWeek < end - 7) {\n            daysToDayOfWeek += 7;\n        }\n\n        return Math.ceil(moment(mom).add('d', daysToDayOfWeek).dayOfYear() / 7);\n    }\n\n\n    /************************************\n        Top Level Functions\n    ************************************/\n\n    function makeMoment(config) {\n        var input = config._i,\n            format = config._f;\n\n        if (input === null || input === '') {\n            return null;\n        }\n\n        if (typeof input === 'string') {\n            config._i = input = getLangDefinition().preparse(input);\n        }\n\n        if (moment.isMoment(input)) {\n            config = extend({}, input);\n            config._d = new Date(+input._d);\n        } else if (format) {\n            if (isArray(format)) {\n                makeDateFromStringAndArray(config);\n            } else {\n                makeDateFromStringAndFormat(config);\n            }\n        } else {\n            makeDateFromInput(config);\n        }\n\n        return new Moment(config);\n    }\n\n    moment = function (input, format, lang) {\n        return makeMoment({\n            _i : input,\n            _f : format,\n            _l : lang,\n            _isUTC : false\n        });\n    };\n\n    // creating with utc\n    moment.utc = function (input, format, lang) {\n        return makeMoment({\n            _useUTC : true,\n            _isUTC : true,\n            _l : lang,\n            _i : input,\n            _f : format\n        });\n    };\n\n    // creating with unix timestamp (in seconds)\n    moment.unix = function (input) 
{\n        return moment(input * 1000);\n    };\n\n    // duration\n    moment.duration = function (input, key) {\n        var isDuration = moment.isDuration(input),\n            isNumber = (typeof input === 'number'),\n            duration = (isDuration ? input._data : (isNumber ? {} : input)),\n            ret;\n\n        if (isNumber) {\n            if (key) {\n                duration[key] = input;\n            } else {\n                duration.milliseconds = input;\n            }\n        }\n\n        ret = new Duration(duration);\n\n        if (isDuration && input.hasOwnProperty('_lang')) {\n            ret._lang = input._lang;\n        }\n\n        return ret;\n    };\n\n    // version number\n    moment.version = VERSION;\n\n    // default format\n    moment.defaultFormat = isoFormat;\n\n    // This function will load languages and then set the global language.  If\n    // no arguments are passed in, it will simply return the current global\n    // language key.\n    moment.lang = function (key, values) {\n        var i;\n\n        if (!key) {\n            return moment.fn._lang._abbr;\n        }\n        if (values) {\n            loadLang(key, values);\n        } else if (!languages[key]) {\n            getLangDefinition(key);\n        }\n        moment.duration.fn._lang = moment.fn._lang = getLangDefinition(key);\n    };\n\n    // returns language data\n    moment.langData = function (key) {\n        if (key && key._lang && key._lang._abbr) {\n            key = key._lang._abbr;\n        }\n        return getLangDefinition(key);\n    };\n\n    // compare moment object\n    moment.isMoment = function (obj) {\n        return obj instanceof Moment;\n    };\n\n    // for typechecking Duration objects\n    moment.isDuration = function (obj) {\n        return obj instanceof Duration;\n    };\n\n\n    /************************************\n        Moment Prototype\n    ************************************/\n\n\n    moment.fn = Moment.prototype = {\n\n        
clone : function () {\n            return moment(this);\n        },\n\n        valueOf : function () {\n            return +this._d;\n        },\n\n        unix : function () {\n            return Math.floor(+this._d / 1000);\n        },\n\n        toString : function () {\n            return this.format(\"ddd MMM DD YYYY HH:mm:ss [GMT]ZZ\");\n        },\n\n        toDate : function () {\n            return this._d;\n        },\n\n        toJSON : function () {\n            return moment.utc(this).format('YYYY-MM-DD[T]HH:mm:ss.SSS[Z]');\n        },\n\n        toArray : function () {\n            var m = this;\n            return [\n                m.year(),\n                m.month(),\n                m.date(),\n                m.hours(),\n                m.minutes(),\n                m.seconds(),\n                m.milliseconds()\n            ];\n        },\n\n        isValid : function () {\n            if (this._isValid == null) {\n                if (this._a) {\n                    this._isValid = !compareArrays(this._a, (this._isUTC ? 
moment.utc(this._a) : moment(this._a)).toArray());\n                } else {\n                    this._isValid = !isNaN(this._d.getTime());\n                }\n            }\n            return !!this._isValid;\n        },\n\n        utc : function () {\n            this._isUTC = true;\n            return this;\n        },\n\n        local : function () {\n            this._isUTC = false;\n            return this;\n        },\n\n        format : function (inputString) {\n            var output = formatMoment(this, inputString || moment.defaultFormat);\n            return this.lang().postformat(output);\n        },\n\n        add : function (input, val) {\n            var dur;\n            // switch args to support add('s', 1) and add(1, 's')\n            if (typeof input === 'string') {\n                dur = moment.duration(+val, input);\n            } else {\n                dur = moment.duration(input, val);\n            }\n            addOrSubtractDurationFromMoment(this, dur, 1);\n            return this;\n        },\n\n        subtract : function (input, val) {\n            var dur;\n            // switch args to support subtract('s', 1) and subtract(1, 's')\n            if (typeof input === 'string') {\n                dur = moment.duration(+val, input);\n            } else {\n                dur = moment.duration(input, val);\n            }\n            addOrSubtractDurationFromMoment(this, dur, -1);\n            return this;\n        },\n\n        diff : function (input, units, asFloat) {\n            var that = this._isUTC ? 
moment(input).utc() : moment(input).local(),\n                zoneDiff = (this.zone() - that.zone()) * 6e4,\n                diff, output;\n\n            if (units) {\n                // standardize on singular form\n                units = units.replace(/s$/, '');\n            }\n\n            if (units === 'year' || units === 'month') {\n                diff = (this.daysInMonth() + that.daysInMonth()) * 432e5; // 24 * 60 * 60 * 1000 / 2\n                output = ((this.year() - that.year()) * 12) + (this.month() - that.month());\n                output += ((this - moment(this).startOf('month')) - (that - moment(that).startOf('month'))) / diff;\n                if (units === 'year') {\n                    output = output / 12;\n                }\n            } else {\n                diff = (this - that) - zoneDiff;\n                output = units === 'second' ? diff / 1e3 : // 1000\n                    units === 'minute' ? diff / 6e4 : // 1000 * 60\n                    units === 'hour' ? diff / 36e5 : // 1000 * 60 * 60\n                    units === 'day' ? diff / 864e5 : // 1000 * 60 * 60 * 24\n                    units === 'week' ? diff / 6048e5 : // 1000 * 60 * 60 * 24 * 7\n                    diff;\n            }\n            return asFloat ? output : absRound(output);\n        },\n\n        from : function (time, withoutSuffix) {\n            return moment.duration(this.diff(time)).lang(this.lang()._abbr).humanize(!withoutSuffix);\n        },\n\n        fromNow : function (withoutSuffix) {\n            return this.from(moment(), withoutSuffix);\n        },\n\n        calendar : function () {\n            var diff = this.diff(moment().startOf('day'), 'days', true),\n                format = diff < -6 ? 'sameElse' :\n                diff < -1 ? 'lastWeek' :\n                diff < 0 ? 'lastDay' :\n                diff < 1 ? 'sameDay' :\n                diff < 2 ? 'nextDay' :\n                diff < 7 ? 
'nextWeek' : 'sameElse';\n            return this.format(this.lang().calendar(format, this));\n        },\n\n        isLeapYear : function () {\n            var year = this.year();\n            return (year % 4 === 0 && year % 100 !== 0) || year % 400 === 0;\n        },\n\n        isDST : function () {\n            return (this.zone() < moment([this.year()]).zone() ||\n                this.zone() < moment([this.year(), 5]).zone());\n        },\n\n        day : function (input) {\n            var day = this._isUTC ? this._d.getUTCDay() : this._d.getDay();\n            return input == null ? day :\n                this.add({ d : input - day });\n        },\n\n        startOf: function (units) {\n            units = units.replace(/s$/, '');\n            // the following switch intentionally omits break keywords\n            // to utilize falling through the cases.\n            switch (units) {\n            case 'year':\n                this.month(0);\n                /* falls through */\n            case 'month':\n                this.date(1);\n                /* falls through */\n            case 'week':\n            case 'day':\n                this.hours(0);\n                /* falls through */\n            case 'hour':\n                this.minutes(0);\n                /* falls through */\n            case 'minute':\n                this.seconds(0);\n                /* falls through */\n            case 'second':\n                this.milliseconds(0);\n                /* falls through */\n            }\n\n            // weeks are a special case\n            if (units === 'week') {\n                this.day(0);\n            }\n\n            return this;\n        },\n\n        endOf: function (units) {\n            return this.startOf(units).add(units.replace(/s?$/, 's'), 1).subtract('ms', 1);\n        },\n\n        isAfter: function (input, units) {\n            units = typeof units !== 'undefined' ? 
units : 'millisecond';\n            return +this.clone().startOf(units) > +moment(input).startOf(units);\n        },\n\n        isBefore: function (input, units) {\n            units = typeof units !== 'undefined' ? units : 'millisecond';\n            return +this.clone().startOf(units) < +moment(input).startOf(units);\n        },\n\n        isSame: function (input, units) {\n            units = typeof units !== 'undefined' ? units : 'millisecond';\n            return +this.clone().startOf(units) === +moment(input).startOf(units);\n        },\n\n        zone : function () {\n            return this._isUTC ? 0 : this._d.getTimezoneOffset();\n        },\n\n        daysInMonth : function () {\n            return moment.utc([this.year(), this.month() + 1, 0]).date();\n        },\n\n        dayOfYear : function (input) {\n            var dayOfYear = round((moment(this).startOf('day') - moment(this).startOf('year')) / 864e5) + 1;\n            return input == null ? dayOfYear : this.add(\"d\", (input - dayOfYear));\n        },\n\n        isoWeek : function (input) {\n            var week = weekOfYear(this, 1, 4);\n            return input == null ? week : this.add(\"d\", (input - week) * 7);\n        },\n\n        week : function (input) {\n            var week = this.lang().week(this);\n            return input == null ? week : this.add(\"d\", (input - week) * 7);\n        },\n\n        // If passed a language key, it will set the language for this\n        // instance.  
Otherwise, it will return the language configuration\n        // variables for this instance.\n        lang : function (key) {\n            if (key === undefined) {\n                return this._lang;\n            } else {\n                this._lang = getLangDefinition(key);\n                return this;\n            }\n        }\n    };\n\n    // helper for adding shortcuts\n    function makeGetterAndSetter(name, key) {\n        moment.fn[name] = moment.fn[name + 's'] = function (input) {\n            var utc = this._isUTC ? 'UTC' : '';\n            if (input != null) {\n                this._d['set' + utc + key](input);\n                return this;\n            } else {\n                return this._d['get' + utc + key]();\n            }\n        };\n    }\n\n    // loop through and add shortcuts (Month, Date, Hours, Minutes, Seconds, Milliseconds)\n    for (i = 0; i < proxyGettersAndSetters.length; i ++) {\n        makeGetterAndSetter(proxyGettersAndSetters[i].toLowerCase().replace(/s$/, ''), proxyGettersAndSetters[i]);\n    }\n\n    // add shortcut for year (uses different syntax than the getter/setter 'year' == 'FullYear')\n    makeGetterAndSetter('year', 'FullYear');\n\n    // add plural methods\n    moment.fn.days = moment.fn.day;\n    moment.fn.weeks = moment.fn.week;\n    moment.fn.isoWeeks = moment.fn.isoWeek;\n\n    /************************************\n        Duration Prototype\n    ************************************/\n\n\n    moment.duration.fn = Duration.prototype = {\n        weeks : function () {\n            return absRound(this.days() / 7);\n        },\n\n        valueOf : function () {\n            return this._milliseconds +\n              this._days * 864e5 +\n              this._months * 2592e6;\n        },\n\n        humanize : function (withSuffix) {\n            var difference = +this,\n                output = relativeTime(difference, !withSuffix, this.lang());\n\n            if (withSuffix) {\n                output = 
this.lang().pastFuture(difference, output);\n            }\n\n            return this.lang().postformat(output);\n        },\n\n        lang : moment.fn.lang\n    };\n\n    function makeDurationGetter(name) {\n        moment.duration.fn[name] = function () {\n            return this._data[name];\n        };\n    }\n\n    function makeDurationAsGetter(name, factor) {\n        moment.duration.fn['as' + name] = function () {\n            return +this / factor;\n        };\n    }\n\n    for (i in unitMillisecondFactors) {\n        if (unitMillisecondFactors.hasOwnProperty(i)) {\n            makeDurationAsGetter(i, unitMillisecondFactors[i]);\n            makeDurationGetter(i.toLowerCase());\n        }\n    }\n\n    makeDurationAsGetter('Weeks', 6048e5);\n\n\n    /************************************\n        Default Lang\n    ************************************/\n\n\n    // Set default language, other languages will inherit from English.\n    moment.lang('en', {\n        ordinal : function (number) {\n            var b = number % 10,\n                output = (~~ (number % 100 / 10) === 1) ? 'th' :\n                (b === 1) ? 'st' :\n                (b === 2) ? 'nd' :\n                (b === 3) ? 
'rd' : 'th';\n            return number + output;\n        }\n    });\n\n\n    /************************************\n        Exposing Moment\n    ************************************/\n\n\n    // CommonJS module is defined\n    if (hasModule) {\n        module.exports = moment;\n    }\n    /*global ender:false */\n    if (typeof ender === 'undefined') {\n        // here, `this` means `window` in the browser, or `global` on the server\n        // add `moment` as a global object via a string identifier,\n        // for Closure Compiler \"advanced\" mode\n        this['moment'] = moment;\n    }\n    /*global define:false */\n    if (typeof define === \"function\" && define.amd) {\n        define(\"moment\", [], function () {\n            return moment;\n        });\n    }\n}).call(this);\n"
  },
  {
    "path": "vendor/assets/stylesheets/tablesorter.theme.kochiku.css",
    "content": "/*************\nKochiku table sorter Theme\nRef: http://mottie.github.io/tablesorter/ version 2.8.13\nCopied from the tablesorter default theme and deleted everything but header section\nRemoved font, border-bottom styling.\n*************/\n\n/* header */\n.tablesorter-default th,\n.tablesorter-default thead td {\n\tcolor: #000;\n\tbackground-color: #fff;\n\tborder-collapse: collapse;\n\tpadding: 0;\n}\n.tablesorter-default tfoot th,\n.tablesorter-default tfoot td {\n\tborder: 0;\n}\n.tablesorter-default .header,\n.tablesorter-default .tablesorter-header {\n\tbackground-image: url(data:image/gif;base64,R0lGODlhFQAJAIAAACMtMP///yH5BAEAAAEALAAAAAAVAAkAAAIXjI+AywnaYnhUMoqt3gZXPmVg94yJVQAAOw==);\n\tbackground-position: center right;\n\tbackground-repeat: no-repeat;\n\tcursor: pointer;\n\twhite-space: normal;\n\tpadding: 4px 20px 4px 4px;\n}\n.tablesorter-default thead .headerSortUp,\n.tablesorter-default thead .tablesorter-headerSortUp,\n.tablesorter-default thead .tablesorter-headerAsc {\n\tbackground-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAACMtMP///yH5BAEAAAEALAAAAAAVAAQAAAINjI8Bya2wnINUMopZAQA7);\n}\n.tablesorter-default thead .headerSortDown,\n.tablesorter-default thead .tablesorter-headerSortDown,\n.tablesorter-default thead .tablesorter-headerDesc {\n\tbackground-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAACMtMP///yH5BAEAAAEALAAAAAAVAAQAAAINjB+gC+jP2ptn0WskLQA7);\n}\n.tablesorter-default thead .sorter-false {\n\tbackground-image: none;\n\tcursor: default;\n\tpadding: 4px;\n}\n\n"
  },
  {
    "path": "vendor/assets/stylesheets/tipTip.scss",
    "content": "/* TipTip CSS - Version 1.2 */\n\n#tiptip_holder {\n\tdisplay: none;\n\tposition: absolute;\n\ttop: 0;\n\tleft: 0;\n\tz-index: 99999;\n}\n\n#tiptip_holder.tip_top {\n\tpadding-bottom: 5px;\n}\n\n#tiptip_holder.tip_bottom {\n\tpadding-top: 5px;\n}\n\n#tiptip_holder.tip_right {\n\tpadding-left: 5px;\n}\n\n#tiptip_holder.tip_left {\n\tpadding-right: 5px;\n}\n\n#tiptip_content {\n\tfont-size: 11px;\n\tcolor: #fff;\n\ttext-shadow: 0 0 2px #000;\n\tpadding: 4px 8px;\n\tborder: 1px solid rgba(255,255,255,0.25);\n\tbackground-color: rgb(25,25,25);\n\tbackground-color: rgba(25,25,25,0.92);\n\tbackground-image: -webkit-gradient(linear, 0% 0%, 0% 100%, from(transparent), to(#000));\n\tborder-radius: 3px;\n\t-webkit-border-radius: 3px;\n\t-moz-border-radius: 3px;\n\tbox-shadow: 0 0 3px #555;\n\t-webkit-box-shadow: 0 0 3px #555;\n\t-moz-box-shadow: 0 0 3px #555;\n}\n\n#tiptip_arrow, #tiptip_arrow_inner {\n\tposition: absolute;\n\tborder-color: transparent;\n\tborder-style: solid;\n\tborder-width: 6px;\n\theight: 0;\n\twidth: 0;\n}\n\n#tiptip_holder.tip_top #tiptip_arrow {\n\tborder-top-color: #fff;\n\tborder-top-color: rgba(255,255,255,0.35);\n}\n\n#tiptip_holder.tip_bottom #tiptip_arrow {\n\tborder-bottom-color: #fff;\n\tborder-bottom-color: rgba(255,255,255,0.35);\n}\n\n#tiptip_holder.tip_right #tiptip_arrow {\n\tborder-right-color: #fff;\n\tborder-right-color: rgba(255,255,255,0.35);\n}\n\n#tiptip_holder.tip_left #tiptip_arrow {\n\tborder-left-color: #fff;\n\tborder-left-color: rgba(255,255,255,0.35);\n}\n\n#tiptip_holder.tip_top #tiptip_arrow_inner {\n\tmargin-top: -7px;\n\tmargin-left: -6px;\n\tborder-top-color: rgb(25,25,25);\n\tborder-top-color: rgba(25,25,25,0.92);\n}\n\n#tiptip_holder.tip_bottom #tiptip_arrow_inner {\n\tmargin-top: -5px;\n\tmargin-left: -6px;\n\tborder-bottom-color: rgb(25,25,25);\n\tborder-bottom-color: rgba(25,25,25,0.92);\n}\n\n#tiptip_holder.tip_right #tiptip_arrow_inner {\n\tmargin-top: -6px;\n\tmargin-left: 
-5px;\n\tborder-right-color: rgb(25,25,25);\n\tborder-right-color: rgba(25,25,25,0.92);\n}\n\n#tiptip_holder.tip_left #tiptip_arrow_inner {\n\tmargin-top: -6px;\n\tmargin-left: -7px;\n\tborder-left-color: rgb(25,25,25);\n\tborder-left-color: rgba(25,25,25,0.92);\n}\n\n/* Webkit Hacks  */\n@media screen and (-webkit-min-device-pixel-ratio:0) {\t\n\t#tiptip_content {\n\t\tpadding: 4px 8px 5px 8px;\n\t\tbackground-color: rgba(45,45,45,0.88);\n\t}\n\t#tiptip_holder.tip_bottom #tiptip_arrow_inner { \n\t\tborder-bottom-color: rgba(45,45,45,0.88);\n\t}\n\t#tiptip_holder.tip_top #tiptip_arrow_inner { \n\t\tborder-top-color: rgba(20,20,20,0.92);\n\t}\n}"
  }
]